text
stringlengths 2
6.14k
|
|---|
#include "black-configurator-factory.h"
#include "black-configurator.h"
#include "dgr2Macro.h"
#include <new>
USING_DGR2;
extern Logger* g_local_logger;
__BCONF_BEGIN__
extern ULONG g_lock_num;
extern ULONG g_configurator_num;
// Standard COM QueryInterface: hands out IUnknown or IClassFactory and
// bumps the reference count on the returned interface pointer.
HRESULT BlackConfiguratorFactory::QueryInterface(const IID& iid, void **ppv){
    if (iid == IID_IUnknown) {
        IUnknown * unknown = (IUnknown *) this;
        unknown->AddRef();
        *ppv = unknown;
        return S_OK;
    }
    if (iid == IID_IClassFactory) {
        IClassFactory * factory = (IClassFactory *) this;
        factory->AddRef();
        *ppv = factory;
        return S_OK;
    }
    // Unsupported interface: out-pointer must be NULLed per COM rules.
    *ppv = NULL;
    return E_NOINTERFACE;
}
// Constructs the factory with a zero reference count; lifetime is then
// driven entirely by AddRef/Release.
BlackConfiguratorFactory::BlackConfiguratorFactory()
    :ref_num_(0){
    SXLOG_INF(g_local_logger) << _X("New BlackConfiguratorFactory Object Created!") << LBT << END;
}

// Logs destruction; reached via Release() when the count drops to zero.
BlackConfiguratorFactory::~BlackConfiguratorFactory(){
    SXLOG_INF(g_local_logger) << _X("BlackConfiguratorFactory Object Destroy!") << LBT << END;
}
// Increments or decrements the module-wide lock count that keeps the
// server loaded while clients hold locks.
HRESULT BlackConfiguratorFactory::LockServer(BOOL block){
    if (block) {
        ++g_lock_num;
    } else {
        --g_lock_num;
    }
    return NOERROR;
}
// Creates a new BlackConfigurator and returns the requested interface
// through ppv. Aggregation is not supported (CLASS_E_NOAGGREGATION).
HRESULT BlackConfiguratorFactory::CreateInstance(IUnknown* pUnknownOuter, const IID& iid, void **ppv){
    *ppv = NULL;
    if (NULL != pUnknownOuter)
        return CLASS_E_NOAGGREGATION;
    // Plain `new` throws std::bad_alloc instead of returning NULL, which
    // made the original NULL check unreachable; the nothrow form makes
    // the E_OUTOFMEMORY path actually reachable.
    BlackConfigurator * new_configurator = new (std::nothrow) BlackConfigurator();
    if (NULL == new_configurator)
        return E_OUTOFMEMORY;
    HRESULT hr = new_configurator->QueryInterface(iid, ppv);
    if (hr != S_OK) {
        // The constructor incremented the global configurator count;
        // undo it before destroying the rejected instance.
        g_configurator_num--;
        delete new_configurator;
    }
    return hr;
}
// Drops one reference; destroys the factory once the count reaches zero.
ULONG BlackConfiguratorFactory::Release(){
    --ref_num_;
    if (ref_num_ != 0)
        return ref_num_;
    delete this;
    return 0;
}
// Adds one reference and returns the updated count.
ULONG BlackConfiguratorFactory::AddRef(){
    ref_num_ += 1;
    return ref_num_;
}
__BCONF_END__
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of AudioLazy, the signal processing Python package.
# Copyright (C) 2012-2014 Danilo de Jesus da Silva Bellini
#
# AudioLazy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Created on Wed May 01 2013
# danilo [dot] bellini [at] gmail [dot] com
"""
Pitch follower via DFT peak with Tkinter GUI
"""
# ------------------------
# AudioLazy pitch follower
# ------------------------
import sys
from audiolazy import (tostream, AudioIO, freq2str, sHz, chunks,
lowpass, envelope, pi, thub, Stream, maverage)
from numpy.fft import rfft
def limiter(sig, threshold=.1, size=256, env=envelope.rms, cutoff=pi/2048):
  # Limits ``sig`` so its smoothed envelope stays at or below ``threshold``.
  # ``thub`` duplicates the lazy stream so it can be consumed twice: once
  # for the envelope/gain computation and once for the output product.
  sig = thub(sig, 2)
  # Unity gain while the averaged envelope is small enough, otherwise
  # scale each sample down by threshold / envelope.
  return sig * Stream( 1. if el <= threshold else threshold / el
                       for el in maverage(size)(env(sig, cutoff=cutoff)) )
@tostream
def dft_pitch(sig, size=2048, hop=None):
  """
  Yields one pitch estimate (in rad/sample) per block of ``sig``.

  The DFT peak bin is chosen with a mild low-frequency bias: each bin's
  magnitude is divided by ``2 * bin / size + 1``, making harmonics less
  likely to win over the fundamental.
  """
  for blk in Stream(sig).blocks(size=size, hop=hop):
    dft_data = rfft(blk)
    # Only the winning bin index is needed; the magnitude itself was an
    # unused variable in the original code.
    idx, _ = max(enumerate(dft_data),
                 key=lambda el: abs(el[1]) / (2 * el[0] / size + 1))
    yield 2 * pi * idx / size
def pitch_from_mic(upd_time_in_ms):
  # Generator: records from the microphone and yields a formatted pitch
  # string roughly every ``upd_time_in_ms`` milliseconds.
  rate = 44100
  s, Hz = sHz(rate)
  api = sys.argv[1] if sys.argv[1:] else None # Choose API via command-line
  # JACK wants single-sample chunks; other APIs work fine with 16.
  chunks.size = 1 if api == "jack" else 16
  with AudioIO(api=api) as recorder:
    snd = recorder.record(rate=rate)
    # Limit dynamics, then band-limit to below 400 Hz before detection.
    sndlow = lowpass(400 * Hz)(limiter(snd, cutoff=20 * Hz))
    hop = int(upd_time_in_ms * 1e-3 * s)
    # DFT size is twice the hop, so consecutive blocks overlap by 50%.
    for pitch in freq2str(dft_pitch(sndlow, size=2*hop, hop=hop) / Hz):
      yield pitch
# ----------------
# GUI with tkinter
# ----------------
if __name__ == "__main__":
  try:
    import tkinter            # Python 3
  except ImportError:
    import Tkinter as tkinter # Python 2 fallback
  import threading
  import re

  # Window (Tk init), text label and button
  tk = tkinter.Tk()
  tk.title(__doc__.strip().splitlines()[0])
  lbldata = tkinter.StringVar(tk)
  lbltext = tkinter.Label(tk, textvariable=lbldata, font=("Purisa", 72),
                          width=10)
  lbltext.pack(expand=True, fill=tkinter.BOTH)
  btnclose = tkinter.Button(tk, text="Close", command=tk.destroy,
                            default="active")
  btnclose.pack(fill=tkinter.X)

  # Needed data
  # Splits a pitch string like "A4+3%" into (note, deviation sign, rest)
  # so each part can go on its own display line.
  regex_note = re.compile(r"^([A-Gb#]*-?[0-9]*)([?+-]?)(.*?%?)$")
  upd_time_in_ms = 200

  # Update functions for each thread
  def upd_value(): # Recording thread
    # Pulls pitch strings from the microphone generator until told to stop;
    # each value is stashed on ``tk`` for the GUI thread to render.
    pitches = iter(pitch_from_mic(upd_time_in_ms))
    while not tk.should_finish:
      tk.value = next(pitches)

  def upd_timer(): # GUI mainloop thread
    # Re-renders the label from the latest value, then re-schedules itself.
    lbldata.set("\n".join(regex_note.findall(tk.value)[0]))
    tk.after(upd_time_in_ms, upd_timer)

  # Multi-thread management initialization
  tk.should_finish = False
  tk.value = freq2str(0) # Starting value
  lbldata.set(tk.value)
  tk.upd_thread = threading.Thread(target=upd_value)

  # Go
  tk.upd_thread.start()
  tk.after_idle(upd_timer)
  tk.mainloop()
  # mainloop() returned: the window is gone, so signal the recording
  # thread to stop and wait for it to exit.
  tk.should_finish = True
  tk.upd_thread.join()
|
#include "access.h"
#include "../base.h"
#include "../errno.h"
#include <linux-syscalls/linux.h>
/*
 * access(2) emulation: checks ``amode`` permissions on ``filename``.
 *
 * Architectures without __NR_access use the equivalent
 * faccessat(AT_FDCWD, filename, amode, 0) syscall instead.
 * Negative Linux errno values are translated to their BSD equivalents.
 */
long sys_access(const char* filename, int amode)
{
	int ret;

#ifdef __NR_access
	ret = LINUX_SYSCALL(__NR_access, filename, amode);
#else
	ret = LINUX_SYSCALL(__NR_faccessat, LINUX_AT_FDCWD, filename, amode, 0);
#endif
	if (ret < 0)
		ret = errno_linux_to_bsd(ret);
	return ret;
}
|
//
//
// PayOffConcrete.h
//
//
#ifndef PAYOFF_CONCRETE_H
#define PAYOFF_CONCRETE_H
#include "PayOff.h"
/**
 * Pay-off functor for a call option; evaluates the pay-off at a given
 * spot price via operator().
 * NOTE(review): the formula lives in the .cpp file, which is not visible
 * here; presumably the conventional max(Spot - Strike, 0) -- confirm.
 */
class PayOffCall : public PayOff
{
public:
    PayOffCall(double Strike_);
    virtual double operator()(double Spot) const;
    virtual ~PayOffCall(){}
    virtual PayOff* clone() const; // change to PayOffCall* if your compiler is up to date
private:
    double Strike; // option strike price
};
/**
 * Pay-off functor for a put option; evaluates the pay-off at a given
 * spot price via operator().
 * NOTE(review): implementation is in the .cpp file; presumably the
 * conventional max(Strike - Spot, 0) -- confirm.
 */
class PayOffPut : public PayOff
{
public:
    PayOffPut(double Strike_);
    virtual double operator()(double Spot) const;
    virtual ~PayOffPut(){}
    virtual PayOff* clone() const;
private:
    double Strike; // option strike price
};
#endif
|
# pylint: skip-file
# pylint: disable=too-many-instance-attributes
class OCRoute(OpenShiftCLI):
    ''' Class to wrap the oc command line tools '''
    kind = 'route'

    # pylint allows 5
    # pylint: disable=too-many-arguments
    def __init__(self,
                 config,
                 verbose=False):
        ''' Constructor for OCRoute '''
        super(OCRoute, self).__init__(config.namespace, config.kubeconfig)
        self.config = config
        self.namespace = config.namespace
        self._route = None  # lazy cache; populated on first `route` access

    @property
    def route(self):
        ''' property returning the cached route, fetching it on first use '''
        if not self._route:
            self.get()
        return self._route

    @route.setter
    def route(self, data):
        ''' setter function for yedit var '''
        self._route = data

    def exists(self):
        ''' return whether the route exists '''
        if self.route:
            return True
        return False

    def get(self):
        '''return route information '''
        result = self._get(self.kind, self.config.name)
        if result['returncode'] == 0:
            self.route = Route(content=result['results'][0])
        elif 'routes \"%s\" not found' % self.config.name in result['stderr']:
            # A missing route is not an error for callers: normalize to a
            # success return code with an empty result payload.
            result['returncode'] = 0
            result['results'] = [{}]
        return result

    def delete(self):
        '''delete the route'''
        return self._delete(self.kind, self.config.name)

    def create(self):
        '''create the route from the configured definition'''
        return self._create_from_content(self.config.name, self.config.data)

    def update(self):
        '''replace the route with the configured definition'''
        # need to update the tls information and the service name
        return self._replace_content(self.kind, self.config.name, self.config.data)

    def needs_update(self):
        ''' verify an update is needed by diffing desired vs. live state '''
        skip = []
        return not Utils.check_def_equal(self.config.data, self.route.yaml_dict, skip_keys=skip, debug=True)
|
#ifndef WLBITEMINVENTORYLOG_H
#define WLBITEMINVENTORYLOG_H
#include <TaoApiCpp/TaoDomain.h>
#include <QDateTime>
#include <QString>
/**
 * @brief Inventory change record (stock movement log entry).
 *
 * @author sd44 <[email protected]>
 */
class WlbItemInventoryLog : public TaoDomain
{
public:
    virtual ~WlbItemInventoryLog() { }

    QString getBatchCode() const;
    void setBatchCode (QString batchCode);

    QDateTime getGmtCreate() const;
    void setGmtCreate (QDateTime gmtCreate);

    qlonglong getId() const;
    void setId (qlonglong id);

    QString getInventType() const;
    void setInventType (QString inventType);

    qlonglong getItemId() const;
    void setItemId (qlonglong itemId);

    QString getOpType() const;
    void setOpType (QString opType);

    qlonglong getOpUserId() const;
    void setOpUserId (qlonglong opUserId);

    QString getOrderCode() const;
    void setOrderCode (QString orderCode);

    qlonglong getOrderItemId() const;
    void setOrderItemId (qlonglong orderItemId);

    qlonglong getQuantity() const;
    void setQuantity (qlonglong quantity);

    QString getRemark() const;
    void setRemark (QString remark);

    qlonglong getResultQuantity() const;
    void setResultQuantity (qlonglong resultQuantity);

    QString getStoreCode() const;
    void setStoreCode (QString storeCode);

    qlonglong getUserId() const;
    void setUserId (qlonglong userId);

    virtual void parseResponse();

private:
    /**
     * @brief Batch code.
     **/
    QString batchCode;
    /**
     * @brief Creation date.
     **/
    QDateTime gmtCreate;
    /**
     * @brief Inventory change ID.
     **/
    qlonglong id;
    /**
     * @brief Inventory type:
     *        VENDIBLE 1 - sellable stock;
     *        FREEZE 201 - frozen stock;
     *        ONWAY 301 - stock in transit;
     *        DEFECT 101 - defective goods;
     *        ENGINE_DAMAGE 102 - machine-damaged goods;
     *        BOX_DAMAGE 103 - box-damaged goods
     **/
    QString inventType;
    /**
     * @brief Item ID.
     **/
    qlonglong itemId;
    /**
     * @brief Inventory operation type:
     *        CHU_KU 1 - stock out;
     *        RU_KU 2 - stock in;
     *        FREEZE 3 - freeze;
     *        THAW 4 - unfreeze;
     *        CHECK_FREEZE 5 - freeze confirmation;
     *        CHANGE_KU 6 - inventory type change
     **/
    QString opType;
    /**
     * @brief Operator (stock handler) user ID.
     **/
    qlonglong opUserId;
    /**
     * @brief Order code.
     **/
    QString orderCode;
    /**
     * @brief Order item ID.
     **/
    qlonglong orderItemId;
    /**
     * @brief Quantity delta handled by this operation.
     **/
    qlonglong quantity;
    /**
     * @brief Remark.
     **/
    QString remark;
    /**
     * @brief Resulting quantity after the operation.
     **/
    qlonglong resultQuantity;
    /**
     * @brief Warehouse code.
     **/
    QString storeCode;
    /**
     * @brief User ID.
     **/
    qlonglong userId;
};
#endif /* WLBITEMINVENTORYLOG_H */
|
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have purchased from
# Numenta, Inc. a separate commercial license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import numpy
from random import randrange
from cerebro2.patcher import Patcher
from nupic.research.spatial_pooler import SpatialPooler
def run():
    # Builds a small SpatialPooler, patches it with the cerebro2
    # visualization hooks, and feeds it 100 random binary input patterns.
    # Note: Python 2 file (uses the `print` statement below).
    sp = SpatialPooler(
        inputDimensions=[10, 15],
        columnDimensions=[5, 10],
        potentialRadius=2,
        potentialPct=0.5,
        synPermInactiveDec=0.1,
        synPermActiveInc=0.1,
        synPermConnected=0.1,
        localAreaDensity=0.1,
        numActiveColumnsPerInhArea=-1,  # -1: use localAreaDensity instead
        globalInhibition=True
    )
    inputArray = numpy.zeros(sp.getNumInputs())
    activeArray = numpy.zeros(sp.getNumColumns())
    Patcher().patchSP(sp)  # attach cerebro2 instrumentation
    for i in range(100):
        generateInput(inputArray)
        sp.compute(inputArray, True, activeArray)  # True => learning enabled
        print "Ran iteration:\t{0}".format(i)
def generateInput(inputArray):
    """Fills ``inputArray`` in place with a random 0/1 value per element.

    The original implementation zeroed the array first and then overwrote
    every element anyway; the redundant zeroing pass has been removed.

    :param inputArray: 1-D numpy array mutated in place.
    """
    for i in range(inputArray.size):
        inputArray[i] = randrange(2)
if __name__ == "__main__":
run()
|
from hubblestack.hangtime import HangTime, hangtime_wrapper
import time
import signal
import pytest
def test_basic():
    # A HangTime that expires must raise (tag 13 collected); one that does
    # not expire must stay silent (tag 10 absent). The SIGALRM handler must
    # be back to SIG_DFL after each use.
    bang = set()
    assert signal.getsignal(signal.SIGALRM) == signal.SIG_DFL
    try:
        with HangTime(timeout=1, tag=10):
            time.sleep(0.5)
    except HangTime as ht:
        bang.add(ht.tag)
    # if we forget to clear the remaining timer
    # we'll alarmclock sys.exit here
    # not a real test, but the tests won't pass if we sys.exit
    time.sleep(1)
    assert bang == set()
    assert signal.getsignal(signal.SIGALRM) == signal.SIG_DFL
    try:
        with HangTime(timeout=1, tag=13):
            time.sleep(1.5)
    except HangTime as ht:
        bang.add(ht.tag)
    assert bang == {13,}
    assert signal.getsignal(signal.SIGALRM) == signal.SIG_DFL
def test_inner_timeout():
    # With nested timers only the inner (shorter) ones should fire: tags
    # 11 and 13 are collected, while the outer 2s timers (10, 12) never go
    # off, whether the inner exception is caught inside or propagates out.
    bang = set()
    assert signal.getsignal(signal.SIGALRM) == signal.SIG_DFL
    try:
        with HangTime(timeout=2, tag=10):
            with HangTime(timeout=1, tag=11):
                time.sleep(1.5)
    except HangTime as ht:
        bang.add(ht.tag)
    try:
        with HangTime(timeout=2, tag=12):
            try:
                with HangTime(timeout=1, tag=13):
                    time.sleep(1.5)
            except HangTime as ht:
                bang.add(ht.tag)
    except HangTime as ht:
        bang.add(ht.tag)
    assert bang == {11,13}
    assert signal.getsignal(signal.SIGALRM) == signal.SIG_DFL
def test_outer_timeout():
    # The inner timers complete harmlessly (0.2s sleep inside a 0.7s
    # timer), so only the outer 1s timers fire: tags 'this-is-tag' and 12
    # are collected, and inner tags 11/13 must not appear.
    bang = set()
    assert signal.getsignal(signal.SIGALRM) == signal.SIG_DFL
    try:
        with HangTime(timeout=1, tag='this-is-tag'):
            with HangTime(timeout=0.7, tag=11):
                time.sleep(0.2)
            time.sleep(1)
    except HangTime as ht:
        bang.add(ht.tag)
    try:
        with HangTime(timeout=1, tag=12):
            try:
                with HangTime(timeout=0.7, tag=13):
                    time.sleep(0.2)
            except HangTime as ht:
                bang.add(ht.tag)
            time.sleep(1)
    except HangTime as ht:
        bang.add(ht.tag)
    assert bang == {'this-is-tag', 12}
    assert signal.getsignal(signal.SIGALRM) == signal.SIG_DFL
def test_wrapper():
    # hangtime_wrapper must deliver the timeout exception *inside* the
    # wrapped function, where the bare ``except`` converts it to a value.
    @hangtime_wrapper(timeout=1)
    def blah(a):
        try:
            time.sleep(a)
        except:
            return "timed out"
        return "did not time out"
    assert blah(0.5) == "did not time out"
    assert blah(1.5) == "timed out"
# Salt ends up catching the HangTime exceptions during the grains refreshes. Any
# attempt to catch them with try/except with wrappers in hubblestack.daemon will
# fail. This presents two problems:
#
# 1. The grains will appear to die due to a HangTime and will be missing after
# the refresh
#
# 2. After the HangTime presents an exception, any other hanging grains will
# continue to hang
#
def test_fake_refresh_grains():
    # With repeats=True the wrapper must re-arm after every timeout, so
    # each of the five 2-second sleeps is cut short at ~0.25s and all five
    # interruptions are counted (see the comment block above).
    t1 = time.time()
    @hangtime_wrapper(timeout=0.25, repeats=True)
    def fake_refresh_grains(a,b):
        x = 0
        for i in range(a):
            try:
                time.sleep(b)
            except:
                # each interrupted sleep counts as one timeout
                x += 1
        return x
    x = fake_refresh_grains(5, 2) # five two second sleeps
    target_time = 0.25 * 5 # but each should time out after 0.25s
    t2 = time.time()
    dt = t2-t1
    assert x == 5
    assert dt == pytest.approx(target_time, rel=1e-1)
|
/* ner: src/view_manager.hh
*
* Copyright (c) 2010 Michael Forney
*
* This file is a part of ner.
*
* ner is free software: you can redistribute it and/or modify it under the
* terms of the GNU General Public License version 3, as published by the Free
* Software Foundation.
*
* ner is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* ner. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef NER_VIEW_MANAGER_H
#define NER_VIEW_MANAGER_H 1
#include <vector>
#include <memory>
#include "input_handler.hh"
class View;
/**
* Manages the currently active Views.
*
* This class is a singleton.
*/
class ViewManager : public InputHandler
{
    public:
        // Returns the singleton; valid only after a ViewManager has been
        // constructed and _instance set (constructor is in the .cc file).
        static ViewManager & instance()
        {
            return *_instance;
        }

        ViewManager();
        ~ViewManager();

        /**
         * Handles the given input sequence.
         *
         * First, the ViewManager itself will attempt to handle the sequence,
         * and if it cannot, will try with the currently active view.
         *
         * \param sequence The sequence to handle.
         */
        virtual InputHandler::HandleResult handleKeySequence(const std::vector<int> & sequence);

        /**
         * Adds the view to the managed views, then sets it as the active view.
         *
         * \param view The view to manage
         */
        void addView(const std::shared_ptr<View> & view);

        /**
         * Closes the active view.
         *
         * The view will be deleted.
         */
        void closeActiveView();

        /**
         * Closes all views.
         */
        void close_all_views();

        // Redraw / relayout entry points; implementations live in the .cc file.
        void update();
        void refresh();
        void resize();

        const View & activeView() const;

    private:
        static ViewManager * _instance; // singleton instance pointer

        // Select / close a managed view by its index in _views.
        void openView(int index);
        void closeView(int index);

        std::shared_ptr<View> _activeView;
        std::vector<std::shared_ptr<View>> _views;

        friend class ViewView;
};
#endif
// vim: fdm=syntax fo=croql et sw=4 sts=4 ts=8
|
# Convert a list of hex values into decimal values
# Update from labview_parse_1
# - now includes the option to keep only the first n columns of the original data file,
# where n is the second user input
# Arguments:
# 1 - unparsed input file name
# 2 - number of columns to keep from the original data file
import sys
from time import time
import numpy as np
import matplotlib.pyplot as plt
def mesytec_parse(filename,numberColumnsKeep):
    """Converts a raw MADC hex dump into a tab-separated decimal file.

    NOTE(review): both parameters are immediately overwritten from
    ``sys.argv`` below, so this only behaves as expected when run as a
    script -- confirm before calling it as a library function. The
    ``numberColumnsKeep`` value is also never applied to the output,
    despite the module comment promising column trimming.

    :returns: the name of the written ``*_parsed.txt`` file
    """
    FILEEXTENSIONLENGTH = 4
    NUMBEROFINPUTS = 32 # inherent to the MADC system
    # Hardware channel order in which data words arrive from the MADC.
    dataOrder = [0,1,16,17,8,9,24,25,2,3,18,19,10,11,26,27, \
                 4,5,20,21,12,13,28,29,6,7,22,23,14,15,30,31]
    filename = sys.argv[1]
    if len(sys.argv) == 3:
        numberColumnsKeep = int(sys.argv[2])
    else:
        numberColumnsKeep = NUMBEROFINPUTS
    outfilename = filename[:-FILEEXTENSIONLENGTH]+'_parsed.txt'
    initialTime = time()
    with open(filename) as f:
        with open(outfilename,'w') as of:
            while True:
                currLine = f.readline()
                if currLine == '':
                    # print('Output file written to',outfilename)
                    break
                if currLine == '4040\n': # marks end of header
                    # print(previousLine) # DEBUGGING
                    numData = int(previousLine.split()[0][-2:],16) - 1 # convert last 2 bits to decimal,
                    # -1 because of end-header
                    # print(numData) # DEBUGGING
                    batchData = [0]*NUMBEROFINPUTS
                    badBatch = False
                    # for i in range(numData):
                    # NOTE(review): looks like a debugging leftover -- only
                    # the first 2 of ``numData`` words are read per batch;
                    # confirm against the commented-out line above.
                    for i in range(2):
                        # print(f.tell())
                        dataLine = f.readline()
                        dataidLine = f.readline()
                        data = int(dataLine.split()[0],16)
                        dataid = int(dataidLine.split()[0][-2:],16)
                        # Data must arrive in the expected channel order,
                        # otherwise the whole batch is discarded.
                        if not dataid == dataOrder[i]:
                            badBatch = True
                            break
                        batchData[dataid] = data
                    if not badBatch:
                        # print(batchData)
                        for bd in batchData:
                            of.write(str(bd)+'\t')
                        of.write('\n')
                previousLine = currLine # store previous line for later reference
    elapsedTime = time() - initialTime
    print('File written to',outfilename)
    print(round(elapsedTime,3),'seconds taken to parse.')
    return outfilename
# -----------------------------------------------------------------
# create 2D histogram, where histogram height is displayed as color
# Arguments:
# 1 - data filename
# 2 - number of bins
# 3 - figure title
# 4 - cutoff for # of stds (default 4)
def readParsedFile(filename):
    """Reads a whitespace-delimited two-column file of integers.

    Improvements over the original: the unused ``numSTDs`` local is gone,
    the manual readline loop is replaced by direct file iteration, and
    blank lines are skipped instead of raising IndexError.

    :param filename: path of the parsed data file
    :returns: ``(xList, yList)`` with first- and second-column values
    """
    xList = []
    yList = []
    with open(filename) as f:
        for line in f:
            fields = line.split()
            if not fields:
                continue  # tolerate blank lines
            xList.append(int(fields[0]))
            yList.append(int(fields[1]))
    return xList, yList
def histogram_2d(inputfilename,nbins,figureTitle,stds):
    """Creates a 2D histogram plot (counts shown as color) and saves it.

    :param inputfilename: parsed two-column data file
    :param nbins: number of bins along each axis
    :param figureTitle: title shown on the figure
    :param stds: points further than this many standard deviations from
        the mean on either axis are discarded before binning
    """
    FILEEXTENSIONLENGTH = 4
    DEFAULTSTDS = 5
    # #________________________________________________|
    # # Inputs #|
    # inputfilename = sys.argv[1] #|
    # nbins = int(sys.argv[2]) #|
    # figureTitle = sys.argv[3] #|
    # if len(sys.argv) == 5: #|
    # stds = float(sys.argv[4]) #|
    # else: #|
    # stds = DEFAULTSTDS #|
    # #________________________________________________|
    figureName = inputfilename[:-FILEEXTENSIONLENGTH] + '_plot.png'
    x, y = readParsedFile(inputfilename)
    # Outlier trimming window: mean +/- stds * std on each axis.
    stdX = np.std(x)
    meanX = np.mean(x)
    maxX = meanX + (stdX * stds)
    minX = meanX - (stdX * stds)
    # maxX = 3000
    # minX = 0
    stdY = np.std(y)
    meanY = np.mean(y)
    maxY = meanY + (stdY * stds)
    minY = meanY - (stdY * stds)
    # maxY = 3000
    # minY = 0
    trimmedX = []
    trimmedY = []
    for i, j in zip(x,y):
        if i < minX or i > maxX or j < minY or j > maxY:
            continue
        trimmedX.append(i)
        trimmedY.append(j)
    H, xedges, yedges = np.histogram2d(trimmedX, trimmedY, bins = nbins)
    # Rotate/flip so the image orientation matches the axis conventions.
    H = np.rot90(H)
    H = np.flipud(H)
    # Mask empty bins so they render as background rather than zero-color.
    Hmasked = np.ma.masked_where(H==0,H)
    fig = plt.figure()
    # NOTE(review): the "spectral" colormap was removed in newer matplotlib
    # releases (renamed "nipy_spectral") -- confirm the pinned version.
    plt.set_cmap("spectral")
    plt.pcolormesh(xedges,yedges,Hmasked)
    plt.ylabel('TAC')
    plt.xlabel('Amplitude')
    plt.title(figureTitle)
    cbar = plt.colorbar()
    plt.savefig(figureName)
    print('Figure saved as', figureName)
|
from unittest.mock import Mock, patch
import pytest
from jsonrpcclient.client import Client, request_log, response_log
from jsonrpcclient.exceptions import ReceivedErrorResponseError
from jsonrpcclient.requests import Request
from jsonrpcclient.response import Response
from testfixtures import LogCapture, StringComparison
class DummyClient(Client):
    """A dummy client for testing the abstract Client class"""

    def send_message(self, request, response_expected):
        # Always answers with the same successful JSON-RPC response.
        return Response('{"jsonrpc": "2.0", "result": 1, "id": 1}')
class TestLogRequest:
    # Exercises Client.log_request and its trim_log_values behavior by
    # capturing what is written to the request logger.

    def test(self, *_):
        # Default call: the raw request text appears in the request log.
        with LogCapture() as capture:
            DummyClient().log_request('{"jsonrpc": "2.0", "method": "foo"}')
        capture.check(
            (
                "jsonrpcclient.client.request",
                "INFO",
                StringComparison(r'.*"method": "foo".*'),
            )
        )

    def test_trimmed(self):
        # Long values should be shortened to a "head...tail" form.
        req = '{"jsonrpc": "2.0", "method": "go", "params": {"foo": "%s"}}' % (
            "foo" * 100,
        )
        with LogCapture() as capture:
            DummyClient().log_request(req, trim_log_values=True)
        capture.check(
            (
                "jsonrpcclient.client.request",
                "INFO",
                StringComparison(r".*foofoofoof...ofoofoofoo.*"),
            )
        )

    def test_untrimmed(self):
        """Should not trim"""
        req = '{"jsonrpc": "2.0", "method": "go", "params": {"foo": "%s"}}' % (
            "foo" * 100,
        )
        with LogCapture() as capture:
            DummyClient().log_request(req, trim_log_values=False)
        capture.check(
            (
                "jsonrpcclient.client.request",
                "INFO",
                StringComparison(r".*" + "foo" * 100 + ".*"),
            )
        )
class TestLogResponse:
    # Mirror of TestLogRequest for Client.log_response / the response log.

    def test(self):
        # Default call: the raw response text appears in the response log.
        with LogCapture() as capture:
            DummyClient().log_response(
                Response('{"jsonrpc": "2.0", "result": 5, "id": 1}')
            )
        capture.check(
            (
                "jsonrpcclient.client.response",
                "INFO",
                StringComparison(r'.*"result": 5.*'),
            )
        )

    def test_trimmed(self):
        # Long result values should be shortened to a "head...tail" form.
        req = '{"jsonrpc": "2.0", "result": "%s", "id": 1}' % ("foo" * 100,)
        with LogCapture() as capture:
            DummyClient().log_response(Response(req), trim_log_values=True)
        capture.check(
            (
                "jsonrpcclient.client.response",
                "INFO",
                StringComparison(r".*foofoofoof...ofoofoofoo.*"),
            )
        )

    def test_untrimmed(self):
        """Should not trim"""
        res = '{"jsonrpc": "2.0", "result": {"foo": "%s"}}' % ("foo" * 100,)
        with LogCapture() as capture:
            DummyClient().log_response(Response(res), trim_log_values=False)
        capture.check(
            (
                "jsonrpcclient.client.response",
                "INFO",
                StringComparison(r".*" + "foo" * 100 + ".*"),
            )
        )
def test_basic_logging():
    # basic_logging=True at construction installs one handler on each log.
    c = DummyClient(basic_logging=True)
    assert len(request_log.handlers) == 1
    assert len(response_log.handlers) == 1

def test_basic_logging_call():
    # Calling basic_logging() explicitly has the same effect.
    c = DummyClient().basic_logging()
    assert len(request_log.handlers) == 1
    assert len(response_log.handlers) == 1

def test_basic_logging_twice():
    # Configuring twice must not attach duplicate handlers.
    c = DummyClient(basic_logging=True)
    c.basic_logging()
    assert len(request_log.handlers) == 1
    assert len(response_log.handlers) == 1
@patch("jsonrpcclient.client.request_log")
def test_send_string(*_):
    # send() accepts a pre-serialized JSON string.
    request = '{"jsonrpc": "2.0", "method": "foo", "id": 1}'
    response = DummyClient().send(request)
    assert response.data.ok == True
    assert response.data.result == 1

@patch("jsonrpcclient.client.request_log")
def test_send_dict(*_):
    # send() accepts a request as a plain dict.
    request = {"jsonrpc": "2.0", "method": "foo", "id": 1}
    response = DummyClient().send(request)
    assert response.data.ok == True

@patch("jsonrpcclient.client.request_log")
def test_send_batch(*_):
    # send() accepts a list of Request objects (batch request).
    requests = [Request("foo"), Request("bar")]
    response = DummyClient().send(requests)
    assert response.data.ok == True

@patch("jsonrpcclient.client.request_log")
def test_request(*_):
    # request() builds and sends a method call with positional params.
    response = DummyClient().request("multiply", 3, 5)
    assert response.data.ok == True

@patch("jsonrpcclient.client.request_log")
def test_notify(*_):
    # notify() sends a notification; the dummy transport still answers.
    response = DummyClient().notify("multiply", 3, 5)
    assert response.data.ok == True
@patch("jsonrpcclient.client.request_log")
def test_alternate_usage(*_):
    """Unknown attribute access on the client acts as an RPC method call."""
    # The original declared a ``self`` parameter on a module-level pytest
    # function; pytest treats that as a missing fixture and errors out
    # before the test body runs. Dropping it makes the test collectable.
    response = DummyClient().multiply(3, 5)
    assert response.data.ok == True
@patch("jsonrpcclient.client.request_log")
def test_send_single_request_error(*_):
    # An error response from the transport must surface as an exception.
    with pytest.raises(ReceivedErrorResponseError):
        client = DummyClient()
        client.send_message = Mock(
            return_value=Response(
                '{"jsonrpc": "2.0", "error": {"code": 1, "message": "foo"}, "id": 1}'
            )
        )
        client.request("ping")
|
/**
* DSS - Digital Signature Services
* Copyright (C) 2015 European Commission, provided under the CEF programme
*
* This file is part of the "DSS - Digital Signature Services" project.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package eu.europa.esig.dss.tsl.parsing;
import java.util.List;
import eu.europa.esig.dss.spi.tsl.OtherTSLPointer;
/**
 * Parsing result for a List Of Trusted Lists (LOTL): holds the pointers to
 * other LOTLs and to national trusted lists extracted from the LOTL, plus
 * the signing-certificate announcement URL and the pivot LOTL URLs.
 */
public class LOTLParsingResult extends AbstractParsingResult {

	/** Pointers to other LOTLs referenced by this list */
	private List<OtherTSLPointer> lotlPointers;

	/** Pointers to the (national) trusted lists */
	private List<OtherTSLPointer> tlPointers;

	/** URL where the LOTL signing certificates are announced */
	private String signingCertificateAnnouncementURL;

	/** URLs of the pivot LOTLs */
	private List<String> pivotURLs;

	public LOTLParsingResult() {
		super();
	}

	public List<OtherTSLPointer> getLotlPointers() {
		return lotlPointers;
	}

	public void setLotlPointers(List<OtherTSLPointer> lotlPointers) {
		this.lotlPointers = lotlPointers;
	}

	public List<OtherTSLPointer> getTlPointers() {
		return tlPointers;
	}

	public void setTlPointers(List<OtherTSLPointer> tlPointers) {
		this.tlPointers = tlPointers;
	}

	public String getSigningCertificateAnnouncementURL() {
		return signingCertificateAnnouncementURL;
	}

	public void setSigningCertificateAnnouncementURL(String signingCertificateAnnouncementURL) {
		this.signingCertificateAnnouncementURL = signingCertificateAnnouncementURL;
	}

	public List<String> getPivotURLs() {
		return pivotURLs;
	}

	public void setPivotURLs(List<String> pivotURLs) {
		this.pivotURLs = pivotURLs;
	}
}
|
<!doctype html>
<html>
<head>
<title>Tooltip Hooks</title>
<script src="../../dist/Chart.min.js"></script>
<script src="../utils.js"></script>
<style>
canvas {
-moz-user-select: none;
-webkit-user-select: none;
-ms-user-select: none;
}
</style>
</head>
<body>
<div style="width:75%;">
<canvas id="canvas"></canvas>
</div>
<script>
// Line-chart configuration demonstrating tooltip callbacks: the footer
// callback sums the values of every item shown in the (index-mode) tooltip.
var config = {
	type: 'line',
	data: {
		labels: ['January', 'February', 'March', 'April', 'May', 'June', 'July'],
		datasets: [{
			label: 'My First dataset',
			borderColor: window.chartColors.red,
			backgroundColor: window.chartColors.red,
			data: [
				randomScalingFactor(),
				randomScalingFactor(),
				randomScalingFactor(),
				randomScalingFactor(),
				randomScalingFactor(),
				randomScalingFactor(),
				randomScalingFactor()
			],
			fill: false,
		}, {
			label: 'My Second dataset',
			borderColor: window.chartColors.blue,
			backgroundColor: window.chartColors.blue,
			data: [
				randomScalingFactor(),
				randomScalingFactor(),
				randomScalingFactor(),
				randomScalingFactor(),
				randomScalingFactor(),
				randomScalingFactor(),
				randomScalingFactor()
			],
			fill: false,
		}]
	},
	options: {
		responsive: true,
		title: {
			display: true,
			text: 'Chart.js Line Chart - Custom Information in Tooltip'
		},
		tooltips: {
			mode: 'index',
			callbacks: {
				// Use the footer callback to display the sum of the items showing in the tooltip
				footer: function(tooltipItems, data) {
					var sum = 0;
					tooltipItems.forEach(function(tooltipItem) {
						sum += data.datasets[tooltipItem.datasetIndex].data[tooltipItem.index];
					});
					return 'Sum: ' + sum;
				},
			},
			footerFontStyle: 'normal'
		},
		hover: {
			mode: 'index',
			intersect: true
		},
		scales: {
			xAxes: [{
				display: true,
				scaleLabel: {
					// Chart.js 2.x uses `display` (the v1 `show` key is
					// ignored), so the axis titles never rendered before.
					display: true,
					labelString: 'Month'
				}
			}],
			yAxes: [{
				display: true,
				scaleLabel: {
					display: true,
					labelString: 'Value'
				}
			}]
		}
	}
};

window.onload = function() {
	var ctx = document.getElementById('canvas').getContext('2d');
	window.myLine = new Chart(ctx, config);
};
</script>
</body>
</html>
|
# -*- coding: utf-8 -*-
#
# Copyright 2011 Sybren A. Stüvel <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Python compatibility wrappers."""
"from __future__ import absolute_import"
from third_party import itertools
import sys
from struct import pack
MAX_INT = sys.maxsize
MAX_INT64 = (1 << 63) - 1
MAX_INT32 = (1 << 31) - 1
MAX_INT16 = (1 << 15) - 1
PY2 = sys.version_info[0] == 2
# Determine the word size of the processor.
if MAX_INT == MAX_INT64:
# 64-bit processor.
MACHINE_WORD_SIZE = 64
elif MAX_INT == MAX_INT32:
# 32-bit processor.
MACHINE_WORD_SIZE = 32
else:
# Else we just assume 64-bit processor keeping up with modern times.
MACHINE_WORD_SIZE = 64
if PY2:
integer_types = (int, long)
range = xrange
zip = itertools.izip
else:
integer_types = (int, )
range = range
zip = zip
def write_to_stdout(data):
    """Writes bytes to stdout

    :type data: bytes
    """
    if PY2:
        # Py2's stdout accepts byte strings directly.
        sys.stdout.write(data)
    else:
        # On Py3 we must use the buffer interface to write bytes.
        sys.stdout.buffer.write(data)
def is_bytes(obj):
    """Tells whether ``obj`` is a byte string.

    :param obj: the value to test.
    :returns: ``True`` for a byte string, ``False`` otherwise.
    """
    result = isinstance(obj, bytes)
    return result
def is_integer(obj):
    """Tells whether ``obj`` is an integer.

    :param obj: the value to test.
    :returns: ``True`` for an integer, ``False`` otherwise.
    """
    result = isinstance(obj, integer_types)
    return result
def byte(num):
    """Packs ``num`` into a single base-256 (byte) value.

    Works identically on all supported Python versions, unlike ``chr``,
    so use it wherever a single byte is expected.

    :param num: an unsigned integer between 0 and 255 (both inclusive).
    :returns: a single byte.
    """
    packed = pack("B", num)
    return packed
def xor_bytes(b1, b2):
    """Returns the bitwise XOR of two bytes objects, ``b1 ^ b2``.

    XOR is commutative, so argument order does not matter. When lengths
    differ, the extra tail of the longer object is ignored.

    :param b1: first bytes object.
    :param b2: second bytes object.
    :returns: bytes object holding the XOR result.
    """
    pairs = zip(b1, b2)
    if PY2:
        # Py2 iterates bytes as 1-char strings; convert via ord/byte.
        return ''.join(byte(ord(a) ^ ord(b)) for a, b in pairs)
    return bytes(a ^ b for a, b in pairs)
def get_word_alignment(num, force_arch=64,
                       _machine_word_size=MACHINE_WORD_SIZE):
    """Returns alignment details for ``num`` on the current platform.

    :param num: unsigned integral number.
    :param force_arch: set to anything other than 64 to prefer 32-bit
        chunks even when running on a 64-bit machine.
    :param _machine_word_size: (internal) machine word size for alignment.
    :returns: 4-tuple
        ``(word_bits, word_bytes, max_uint, packing_format_type)``.
    """
    # Upper bounds of each supported unsigned width.
    max_uint64 = 0xffffffffffffffff
    max_uint32 = 0xffffffff
    max_uint16 = 0xffff
    max_uint8 = 0xff

    prefer_64 = force_arch == 64 and _machine_word_size >= 64
    if prefer_64 and num > max_uint32:
        # 64-bit unsigned integer.
        return 64, 8, max_uint64, "Q"
    if num > max_uint16:
        # 32-bit unsigned integer.
        return 32, 4, max_uint32, "L"
    if num > max_uint8:
        # 16-bit unsigned integer.
        return 16, 2, max_uint16, "H"
    # 8-bit unsigned integer.
    return 8, 1, max_uint8, "B"
|
# Copyright 2015 Joe Block <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Helper functions for use in ec2.
"""
import json
import subprocess
import urllib2
import boto.ec2
import boto.utils
def getAWSAccountID():
    '''
    Return the instance's AWS account number, or '0' when not in EC2.

    Queries the EC2 instance-identity document from the link-local
    metadata service; the 5 second timeout keeps non-EC2 hosts from
    hanging.

    :returns: the AWS account id, or '0' when the metadata service is
        unreachable
    :rtype: str
    '''
    link = "http://169.254.169.254/latest/dynamic/instance-identity/document"
    try:
        conn = urllib2.urlopen(url=link, timeout=5)
    except urllib2.URLError:
        # Not running in EC2, or the metadata service is unreachable.
        return '0'
    try:
        jsonData = json.loads(conn.read())
    finally:
        # Always release the metadata-service connection (the original
        # leaked it).
        conn.close()
    return jsonData['accountId']
# metadata helper functions
def loadInstanceMetadata():
    """Fetch the EC2 instance metadata for the host we are running on.

    Retries up to three times with a one second timeout per attempt.

    :returns: instance metadata
    """
    metadata = boto.utils.get_instance_metadata(num_retries=3, timeout=1)
    return metadata
def getMetadataKey(name):
    """
    Look up a single instance metadata key.

    :param str name: Name of key to retrieve
    :returns: the value of the specified metadata key
    :rtype: str
    """
    metadata = loadInstanceMetadata()
    return metadata[name]
def myAMIid():
    """Determine the AMI ID for the running instance.

    :returns: ami ID
    :rtype: str
    """
    return getMetadataKey("ami-id")

def myInstanceID():
    """Determine the instance ID for the running instance.

    :returns: instanceID
    :rtype: str
    """
    return getMetadataKey("instance-id")

def myInstanceType():
    """Determine the instance type of the running instance.

    :returns: instance type
    :rtype: str
    """
    return getMetadataKey("instance-type")

def myPublicIPv4():
    """Determine the public IP v4 for the running instance.

    :returns: Instance's public IP v4
    :rtype: str
    """
    return getMetadataKey("public-ipv4")

def myRegion():
    """Region of the running instance.

    The region is the availability zone with its trailing zone letter
    dropped (e.g. 'us-east-1a' -> 'us-east-1').

    :returns: region
    :rtype: str
    """
    zone = getMetadataKey("placement")["availability-zone"]
    return zone[:-1]
# Tag helpers
def readInstanceTag(instanceID, tagName="Name", connection=None):
    """
    Load a tag from EC2.

    :param str instanceID: Instance ID to read the tag on
    :param str tagName: Name of tag to load
    :param connection: optional boto connection to use
    :returns: the tag's value
    :rtype: str
    :raises RuntimeError: when the instance carries no such tag
    """
    assert isinstance(instanceID, basestring), ("instanceID must be a string but is %r" % instanceID)
    assert isinstance(tagName, basestring), ("tagName must be a string but is %r" % tagName)
    if not connection:
        # Assume AWS credentials are in the environment or the instance is using an IAM role
        connection = boto.ec2.connect_to_region(myRegion())
    # Filter the tag values for our instance_id
    # http://docs.aws.amazon.com/AWSEC2/latest/CommandLineReference/ApiReference-cmd-DescribeTags.html
    tagData = connection.get_all_tags(filters={"resource-id": instanceID, "key": tagName})
    if not tagData:
        # Parenthesized raise form instead of the legacy Python 2-only
        # `raise Class, args` statement (forward compatible with Python 3).
        raise RuntimeError("%s: No such tag on %s" % (tagName, instanceID))
    return tagData[0].value
def readMyEC2Tag(tagName, connection=None):
    """
    Read an EC2 tag from the instance we are running on and return it.

    :param str tagName: Name of the tag to read
    :param connection: Optional boto connection
    :returns: the tag's value
    :rtype: str
    """
    assert isinstance(tagName, basestring), ("tagName must be a string but is %r" % tagName)
    # Reuse the caller's connection when given; otherwise rely on ambient
    # AWS credentials (environment variables or an IAM role).
    # http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/AESDG-chapter-instancedata.html
    connection = connection or boto.ec2.connect_to_region(myRegion())
    return readInstanceTag(connection=connection,
                           instanceID=myInstanceID(),
                           tagName=tagName)
def system_call(command):
    """Run a command through the shell and return its stdout.

    Would be better to use subprocess.check_output, but this works on 2.6,
    which is still the system Python on CentOS 7.

    :param str command: command to run
    :returns: output of the command
    :rtype: str
    """
    p = subprocess.Popen([command], stdout=subprocess.PIPE, shell=True)
    # communicate() drains stdout AND waits for the child to exit, so we
    # do not leave zombie processes behind the way a bare .read() can.
    stdout, _ = p.communicate()
    return stdout
def inEC2():
    """Detect if we're running in EC2.

    This check only works if we're running as root, since dmidecode needs
    raw access to the DMI/SMBIOS tables.
    """
    bios_version = system_call('dmidecode -s bios-version')
    return 'amazon' in bios_version.strip().lower()
|
import uuid
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.test import LiveServerTestCase
from django.test.client import Client
import unittest
from django.test.utils import override_settings
from selenium.webdriver.support.wait import WebDriverWait
from dss import settings
from spa.tests import facebook_dom
from spa.tests.webdriver import CustomWebDriver
from utils import here
class TestUploadMix(LiveServerTestCase):
    """Selenium-driven test of the mix-upload flow.

    Logs in through the Django admin first so the upload page is
    reachable as an authenticated user.
    """
    TIMEOUT = 20

    def setUp(self):
        # Superuser we log in with via the admin screen.
        User.objects.create_superuser(username='admin',
                                      password='pw',
                                      email='[email protected]')
        self.wd = CustomWebDriver()
        self.wd.implicitly_wait(100)
        self.wd.set_page_load_timeout(100)
        self.waiter = WebDriverWait(self.wd, self.TIMEOUT)

    def tearDown(self):
        self.wd.quit()

    @override_settings(DEBUG=True)
    def test_upload(self):
        self.login()
        self.open('/mix/upload')
        self.wd.find_css('.btn-next').click()
        # NOTE(review): a stray dangling `self.` statement (a syntax
        # error) was removed here; `print` uses the call form, which is
        # equivalent for a single argument and Python 3 compatible.
        print("Tests completed")

    def open(self, url):
        self.wd.get("%s%s" % (self.live_server_url, url))

    def login(self):
        self.open(reverse('admin:index'))
        self.wd.find_css('#id_username').send_keys("admin")
        self.wd.find_css("#id_password").send_keys('pw')
        self.wd.find_element_by_xpath('//input[@value="Log in"]').click()
        # Waiting for the admin dashboard container confirms the login.
        self.wd.find_css("#grp-content-container")
|
//#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.ocs_if.vo.beans;
/**
 * Generated bean mirror of {@code ims.ocs_if.vo.IfInvestigationLiteVo}.
 *
 * Holds a flat, serializable copy of the value object's fields; nested
 * value objects are stored in their own bean form. Generated by the IMS
 * Development Environment — do not hand-edit the logic.
 */
public class IfInvestigationLiteVoBean extends ims.vo.ValueObjectBean
{
    /** No-arg constructor required for bean (de)serialization. */
    public IfInvestigationLiteVoBean()
    {
    }
    /**
     * Copies every field from the value object. Nested value objects are
     * converted with the map-less getBean(), so shared instances are NOT
     * deduplicated (contrast with populate(map, vo)).
     */
    public IfInvestigationLiteVoBean(ims.ocs_if.vo.IfInvestigationLiteVo vo)
    {
        this.id = vo.getBoId();
        this.version = vo.getBoVersion();
        this.providerinvcode = vo.getProviderInvCode();
        this.activestatus = vo.getActiveStatus() == null ? null : (ims.vo.LookupInstanceBean)vo.getActiveStatus().getBean();
        this.separateorder = vo.getSeparateOrder();
        this.investigationindex = vo.getInvestigationIndex() == null ? null : (ims.ocs_if.vo.beans.IfInvIdxLiteVoBean)vo.getInvestigationIndex().getBean();
        this.pathinvdetails = vo.getPathInvDetails() == null ? null : (ims.ocrr.vo.beans.PathInvDetailsVoBean)vo.getPathInvDetails().getBean();
        this.type = vo.getType() == null ? null : (ims.vo.LookupInstanceBean)vo.getType().getBean();
        this.eventtype = vo.getEventType() == null ? null : (ims.vo.LookupInstanceBean)vo.getEventType().getBean();
    }
    /**
     * Copies every field from the value object, threading {@code map}
     * through nested getBean(map) calls so already-converted value
     * objects are reused instead of duplicated.
     */
    public void populate(ims.vo.ValueObjectBeanMap map, ims.ocs_if.vo.IfInvestigationLiteVo vo)
    {
        this.id = vo.getBoId();
        this.version = vo.getBoVersion();
        this.providerinvcode = vo.getProviderInvCode();
        this.activestatus = vo.getActiveStatus() == null ? null : (ims.vo.LookupInstanceBean)vo.getActiveStatus().getBean();
        this.separateorder = vo.getSeparateOrder();
        this.investigationindex = vo.getInvestigationIndex() == null ? null : (ims.ocs_if.vo.beans.IfInvIdxLiteVoBean)vo.getInvestigationIndex().getBean(map);
        this.pathinvdetails = vo.getPathInvDetails() == null ? null : (ims.ocrr.vo.beans.PathInvDetailsVoBean)vo.getPathInvDetails().getBean(map);
        this.type = vo.getType() == null ? null : (ims.vo.LookupInstanceBean)vo.getType().getBean();
        this.eventtype = vo.getEventType() == null ? null : (ims.vo.LookupInstanceBean)vo.getEventType().getBean();
    }
    /** Builds a value object using a fresh identity map. */
    public ims.ocs_if.vo.IfInvestigationLiteVo buildVo()
    {
        return this.buildVo(new ims.vo.ValueObjectBeanMap());
    }
    /**
     * Builds (or reuses from {@code map}) the value object for this bean.
     * NOTE(review): a null {@code map} skips the lookup but then NPEs on
     * map.addValueObject below — callers appear expected to pass non-null.
     */
    public ims.ocs_if.vo.IfInvestigationLiteVo buildVo(ims.vo.ValueObjectBeanMap map)
    {
        ims.ocs_if.vo.IfInvestigationLiteVo vo = null;
        if(map != null)
            vo = (ims.ocs_if.vo.IfInvestigationLiteVo)map.getValueObject(this);
        if(vo == null)
        {
            vo = new ims.ocs_if.vo.IfInvestigationLiteVo();
            // Register before populate so cyclic references resolve to
            // the same instance.
            map.addValueObject(this, vo);
            vo.populate(map, this);
        }
        return vo;
    }
    public Integer getId()
    {
        return this.id;
    }
    public void setId(Integer value)
    {
        this.id = value;
    }
    public int getVersion()
    {
        return this.version;
    }
    public void setVersion(int value)
    {
        this.version = value;
    }
    public String getProviderInvCode()
    {
        return this.providerinvcode;
    }
    public void setProviderInvCode(String value)
    {
        this.providerinvcode = value;
    }
    public ims.vo.LookupInstanceBean getActiveStatus()
    {
        return this.activestatus;
    }
    public void setActiveStatus(ims.vo.LookupInstanceBean value)
    {
        this.activestatus = value;
    }
    public Boolean getSeparateOrder()
    {
        return this.separateorder;
    }
    public void setSeparateOrder(Boolean value)
    {
        this.separateorder = value;
    }
    public ims.ocs_if.vo.beans.IfInvIdxLiteVoBean getInvestigationIndex()
    {
        return this.investigationindex;
    }
    public void setInvestigationIndex(ims.ocs_if.vo.beans.IfInvIdxLiteVoBean value)
    {
        this.investigationindex = value;
    }
    public ims.ocrr.vo.beans.PathInvDetailsVoBean getPathInvDetails()
    {
        return this.pathinvdetails;
    }
    public void setPathInvDetails(ims.ocrr.vo.beans.PathInvDetailsVoBean value)
    {
        this.pathinvdetails = value;
    }
    public ims.vo.LookupInstanceBean getType()
    {
        return this.type;
    }
    public void setType(ims.vo.LookupInstanceBean value)
    {
        this.type = value;
    }
    public ims.vo.LookupInstanceBean getEventType()
    {
        return this.eventtype;
    }
    public void setEventType(ims.vo.LookupInstanceBean value)
    {
        this.eventtype = value;
    }
    // Backing fields; names are lowercase to match the generator's style.
    private Integer id;
    private int version;
    private String providerinvcode;
    private ims.vo.LookupInstanceBean activestatus;
    private Boolean separateorder;
    private ims.ocs_if.vo.beans.IfInvIdxLiteVoBean investigationindex;
    private ims.ocrr.vo.beans.PathInvDetailsVoBean pathinvdetails;
    private ims.vo.LookupInstanceBean type;
    private ims.vo.LookupInstanceBean eventtype;
}
|
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def GeneralHostWarningEvent(vim, *args, **kwargs):
    '''This event is the general warning event for a host.

    Builds an `ns0:GeneralHostWarningEvent` object via the vim client
    factory. Positional args fill the required properties first, then the
    optional ones, in declaration order; keyword args may supply any of
    them by name.

    :param vim: connection whose client factory creates the object
    :raises IndexError: when fewer than the 5 required properties are given
    :raises InvalidArgumentError: on an unrecognized keyword argument
    '''

    obj = vim.client.factory.create('ns0:GeneralHostWarningEvent')

    # do some validation checking...
    if (len(args) + len(kwargs)) < 5:
        # There are exactly 5 required properties; the original message
        # claimed 6 and reported only len(args) even though kwargs count.
        raise IndexError('Expected at least 5 arguments got: %d' % (len(args) + len(kwargs)))

    required = [ 'message', 'chainId', 'createdTime', 'key', 'userName' ]
    optional = [ 'changeTag', 'computeResource', 'datacenter', 'ds', 'dvs',
        'fullFormattedMessage', 'host', 'net', 'vm', 'dynamicProperty', 'dynamicType' ]

    for name, arg in zip(required+optional, args):
        setattr(obj, name, arg)

    for name, value in kwargs.items():
        if name in required + optional:
            setattr(obj, name, value)
        else:
            raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))

    return obj
|
/*
This file is part of Poti
Poti is free software: you can redistribute it and/or modify
it under the terms of the GNU Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Poti is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Public License for more details.
You should have received a copy of the GNU Public License
along with Poti. If not, see <http://www.gnu.org/licenses/>.
*/
#include <poti.h>
/* Emits a small example Paje trace: a ROOT container holding three
 * PROCESSes which in turn hold THREADs, then tears part of it down.
 * Statement order matters — it is the order of events in the trace. */
int main (int argc, char **argv)
{
  /* Initialize the poti library and write the trace header. */
  poti_init ();
  poti_header ();

  /* Container type hierarchy: ROOT -> PROCESS -> THREAD. */
  poti_DefineContainerType ("ROOT", "0", "ROOT");
  poti_DefineContainerType ("PROCESS", "ROOT", "PROCESS");
  poti_DefineContainerType ("THREAD", "PROCESS", "THREAD");

  /* Instantiate the hierarchy at time 0. */
  poti_CreateContainer (0, "root", "ROOT", "0", "root");
  poti_CreateContainer (0, "p1", "PROCESS", "root", "p1");
  poti_CreateContainer (0, "p2", "PROCESS", "root", "p2");
  poti_CreateContainer (0, "p3", "PROCESS", "root", "p3");
  /* Thread aliases are prefixed with their process number ("1t1");
     the display name keeps the short form ("t1"). */
  poti_CreateContainer (0, "1t1", "THREAD", "p1", "t1");
  poti_CreateContainer (0, "1t2", "THREAD", "p1", "t2");
  poti_CreateContainer (0, "1t3", "THREAD", "p1", "t3");
  poti_CreateContainer (0, "2t1", "THREAD", "p2", "t1");
  poti_CreateContainer (0, "2t2", "THREAD", "p2", "t2");
  poti_CreateContainer (0, "3t1", "THREAD", "p3", "t1");

  /* Destroy p2 mid-trace, then the whole root at the end. */
  poti_DestroyContainer (0.76, "PROCESS", "p2");
  poti_DestroyContainer (1.34, "ROOT", "root");

  poti_close();
  return 0;
}
|
jQuery( document ).ready( function () {

	// Show the detailed settings only while the feature checkbox is on;
	// the trailing .change() applies the current state once on page load.
	jQuery( "#itsec_file_change_enabled" ).change(function () {

		if ( jQuery( "#itsec_file_change_enabled" ).is( ':checked' ) ) {
			jQuery( "#file_change-settings" ).show();
		} else {
			jQuery( "#file_change-settings" ).hide();
		}

	} ).change();

	// On low-memory hosts (<= 128, presumably MB — confirm where
	// mem_limit is localized), warn the user whenever they enable scans.
	if ( itsec_file_change.mem_limit <= 128 ) {

		jQuery( "#itsec_file_change_enabled" ).change( function () {

			if ( this.checked ) {
				alert( itsec_file_change.text );
			}

		} );

	}

	// File browser: clicking a file or a directory prepends its
	// ABSPATH-relative path to the exclusion-list textarea.
	jQuery( '.jquery_file_tree' ).fileTree(
		{
			root: itsec_file_change.ABSPATH,
			script: ajaxurl,
			expandSpeed: - 1,
			collapseSpeed: - 1,
			multiFolder: false
		}, function ( file ) {

			jQuery( '#itsec_file_change_file_list' ).val( file.substring( itsec_file_change.ABSPATH.length ) + "\n" + jQuery( '#itsec_file_change_file_list' ).val() );

		}, function ( directory ) {

			jQuery( '#itsec_file_change_file_list' ).val( directory.substring( itsec_file_change.ABSPATH.length ) + "\n" + jQuery( '#itsec_file_change_file_list' ).val() );

		}
	);

	//process tooltip actions
	// One-off file-change scan, submitted over admin AJAX.
	jQuery( '#itsec_one_time_file_check' ).submit( function ( event ) {

		event.preventDefault();

		var data = {
			action: 'itsec_file_change_ajax',
			nonce: itsec_file_change.nonce
		};

		//let user know we're working
		jQuery( "#itsec_one_time_file_check_submit" ).removeClass( 'button-primary' ).addClass( 'button-secondary' ).attr( 'value', itsec_file_change.scanning_button_text );

		//call the ajax
		jQuery.ajax(
			{
				url: ajaxurl,
				type: 'POST',
				data: data,
				complete: function ( response ) {

					// 1 / -1 redirects to the log page — presumably the
					// scan detected changes; confirm against the PHP
					// handler for itsec_file_change_ajax.
					if ( response.responseText == 1 || response.responseText == - 1 ) {
						window.location.replace( '?page=toplevel_page_itsec_logs' )
					}

					// Restore the submit button to its idle state.
					jQuery( "#itsec_one_time_file_check_submit" ).removeClass( 'button-secondary' ).addClass( 'button-primary' ).attr( 'value', itsec_file_change.button_text );

					// 0 means the scan finished without finding changes.
					if ( response.responseText == 0 ) {
						jQuery( "#itsec_file_change_status" ).text( itsec_file_change.no_changes );
					}

				}
			}
		);

	} );

} );
jQuery( window ).load( function () {

	// Reveal each tree row's select control only while the row is hovered.
	jQuery( document ).on( 'mouseover mouseout', '.jqueryFileTree > li a', function ( event ) {

		var visibility = ( event.type == 'mouseover' ) ? 'visible' : 'hidden';
		jQuery( this ).children( '.itsec_treeselect_control' ).css( 'visibility', visibility );

	} );

} );
|
# Django settings for {{ project_name }} project.

# NOTE(review): DEBUG must be False in production deployments.
DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', '[email protected]'),
)

MANAGERS = ADMINS

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': '',                      # Or path to database file if using sqlite3.
        'USER': '',                      # Not used with sqlite3.
        'PASSWORD': '',                  # Not used with sqlite3.
        'HOST': '',                      # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',                      # Set to empty string for default. Not used with sqlite3.
    }
}

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''

# Absolute path to the directory that holds static files.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''

# URL that handles the static files served from STATIC_ROOT.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'

# URL prefix for admin media -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'

# A list of locations of additional static files
STATICFILES_DIRS = ()

# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
#    'django.contrib.staticfiles.finders.DefaultStorageFinder',
)

# Make this unique, and don't share it with anybody.
# NOTE(review): left empty in the template on purpose — the project must
# fill this in before it can run.
SECRET_KEY = ''

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
#     'django.template.loaders.eggs.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)

ROOT_URLCONF = '{{ project_name }}.urls'

TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Uncomment the next line to enable the admin:
    # 'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    # 'django.contrib.admindocs',
)

# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request':{
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import cStringIO
import difflib
import os
import sys
import unittest
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
BUILD_TOOLS_DIR = os.path.dirname(SCRIPT_DIR)
sys.path.append(BUILD_TOOLS_DIR)
import easy_template
class EasyTemplateTestCase(unittest.TestCase):
  """Unit tests for easy_template.RunTemplate."""

  def _RunTest(self, template, expected, template_dict):
    """Render |template| with |template_dict| and diff against |expected|."""
    src = cStringIO.StringIO(template)
    dst = cStringIO.StringIO()
    easy_template.RunTemplate(src, dst, template_dict)

    if dst.getvalue() != expected:
      # splitlines(True) (was the opaque splitlines(1)) keeps the line
      # endings so the unified diff is faithful.
      expected_lines = expected.splitlines(True)
      actual_lines = dst.getvalue().splitlines(True)
      diff = ''.join(difflib.unified_diff(
          expected_lines, actual_lines,
          fromfile='expected', tofile='actual'))
      self.fail('Unexpected output:\n' + diff)

  def testEmpty(self):
    self._RunTest('', '', {})

  def testNewlines(self):
    self._RunTest('\n\n', '\n\n', {})

  def testNoInterpolation(self):
    template = """I love paris in the
the springtime [don't you?]
{this is not interpolation}.
"""
    self._RunTest(template, template, {})

  def testSimpleInterpolation(self):
    self._RunTest(
        '{{foo}} is my favorite number',
        '42 is my favorite number',
        {'foo': 42})

  def testLineContinuations(self):
    # The original ended this literal with a stray adjacent "" (leftover
    # from a triple-quote edit); removed — the value is unchanged.
    template = "Line 1 \\\nLine 2\n"
    self._RunTest(template, template, {})

  def testIfStatement(self):
    template = r"""
[[if foo:]]
 foo
[[else:]]
 not foo
[[]]"""
    self._RunTest(template, "\n foo\n", {'foo': True})
    self._RunTest(template, "\n not foo\n", {'foo': False})

  def testForStatement(self):
    template = r"""[[for beers in [99, 98, 1]:]]
{{beers}} bottle{{(beers != 1) and 's' or ''}} of beer on the wall...
[[]]"""
    expected = r"""99 bottles of beer on the wall...
98 bottles of beer on the wall...
1 bottle of beer on the wall...
"""
    self._RunTest(template, expected, {})

  def testListVariables(self):
    template = r"""
[[for i, item in enumerate(my_list):]]
{{i+1}}: {{item}}
[[]]
"""
    self._RunTest(template, "\n1: Banana\n2: Grapes\n3: Kumquat\n",
                  {'my_list': ['Banana', 'Grapes', 'Kumquat']})

  def testListInterpolation(self):
    template = "{{', '.join(growing[0:-1]) + ' and ' + growing[-1]}} grow..."
    self._RunTest(template, "Oats, peas, beans and barley grow...",
                  {'growing': ['Oats', 'peas', 'beans', 'barley']})
    self._RunTest(template, "Love and laughter grow...",
                  {'growing': ['Love', 'laughter']})

  def testComplex(self):
    template = r"""
struct {{name}} {
[[for field in fields:]]
[[  if field['type'] == 'array':]]
  {{field['basetype']}} {{field['name']}}[{{field['size']}}];
[[  else:]]
  {{field['type']}} {{field['name']}};
[[  ]]
[[]]
};"""
    expected = r"""
struct Foo {
  std::string name;
  int problems[99];
};"""
    self._RunTest(template, expected, {
        'name': 'Foo',
        'fields': [
            {'name': 'name', 'type': 'std::string'},
            {'name': 'problems', 'type': 'array', 'basetype': 'int', 'size': 99}]})
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
  unittest.main()
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class G4incl(Package):
    """Geant4 data for evaluated particle cross-sections on natural
    composition of elements"""

    homepage = "http://geant4.web.cern.ch"
    url = "http://geant4-data.web.cern.ch/geant4-data/datasets/G4INCL.1.0.tar.gz"

    maintainers = ['drbenmorgan']

    # Only versions relevant to Geant4 releases built by spack are added
    version('1.0', sha256='716161821ae9f3d0565fbf3c2cf34f4e02e3e519eb419a82236eef22c2c4367d')

    def _versioned_data_dir(self, share_root):
        """Versioned dataset directory under ``<share_root>/data``."""
        return join_path(share_root, 'data', 'G4INCL{0}'.format(self.version))

    def install(self, spec, prefix):
        # Datasets are plain file trees; copy the unpacked sources into
        # the versioned share/data directory.
        mkdirp(join_path(prefix.share, 'data'))
        install_tree(self.stage.source_path,
                     self._versioned_data_dir(prefix.share))

    def setup_dependent_run_environment(self, env, dependent_spec):
        # Geant4 locates this dataset through the G4INCLDATA variable.
        env.set('G4INCLDATA', self._versioned_data_dir(self.prefix.share))

    def url_for_version(self, version):
        """Handle version string."""
        base = "http://geant4-data.web.cern.ch/geant4-data/datasets/G4INCL.%s.tar.gz"
        return base % version
|
import gtk
import ns.core
import ns.network
import ns.internet
from ns.ndnSIM import ndn
from visualizer.base import InformationWindow
class ShowNdnPit(InformationWindow):
    """Dialog listing the NDN PIT (Pending Interest Table) of one node.

    Registers itself with the visualizer so update() is invoked on each
    refresh, and unregisters itself when the dialog is closed.
    """
    # Column indices into the backing gtk.ListStore.
    (
        COLUMN_PREFIX,
        COLUMN_FACE
       ) = range(2)

    def __init__(self, visualizer, node_index):
        # Build the dialog, the tree view, and register with the visualizer.
        InformationWindow.__init__(self)
        self.win = gtk.Dialog(parent=visualizer.window,
                              flags=gtk.DIALOG_DESTROY_WITH_PARENT|gtk.DIALOG_NO_SEPARATOR,
                              buttons=(gtk.STOCK_CLOSE, gtk.RESPONSE_CLOSE))
        self.win.connect("response", self._response_cb)
        self.node = ns.network.NodeList.GetNode (node_index)
        node_name = ns.core.Names.FindName (self.node)
        title = "Ndn PIT for node %i" % node_index
        # Append the human-readable node name when one is registered.
        if len(node_name) != 0:
            title += " (" + str(node_name) + ")"
        self.win.set_title (title)
        self.visualizer = visualizer
        self.node_index = node_index

        # NOTE(review): the store declares three columns (str, str, int)
        # but update() only ever populates the first two — confirm whether
        # the int column is vestigial.
        self.table_model = gtk.ListStore(str, str, int)

        treeview = gtk.TreeView(self.table_model)
        treeview.show()
        sw = gtk.ScrolledWindow()
        sw.set_properties(hscrollbar_policy=gtk.POLICY_AUTOMATIC,
                          vscrollbar_policy=gtk.POLICY_AUTOMATIC)
        sw.show()
        sw.add(treeview)
        self.win.vbox.add(sw)
        self.win.set_default_size(600, 300)

        # Dest.
        column = gtk.TreeViewColumn('Prefix', gtk.CellRendererText(),
                                    text=self.COLUMN_PREFIX)
        treeview.append_column(column)

        # Interface
        column = gtk.TreeViewColumn('Info', gtk.CellRendererText(),
                                    text=self.COLUMN_FACE)
        treeview.append_column(column)

        self.visualizer.add_information_window(self)
        self.win.show()

    def _response_cb(self, win, response):
        # Any response closes the dialog and detaches it from the visualizer.
        self.win.destroy()
        self.visualizer.remove_information_window(self)

    def update(self):
        # Refresh the table from the node's current PIT contents; bail out
        # quietly when no PIT is available.
        ndnPit = ndn.L3Protocol.getL3Protocol(self.node).getForwarder().getPit()
        if ndnPit is None:
            return
        self.table_model.clear()
        for item in ndnPit:
            tree_iter = self.table_model.append()
            self.table_model.set(tree_iter,
                                 self.COLUMN_PREFIX, str(item.getName()),
                                 self.COLUMN_FACE, str(item.getInterest()))
def populate_node_menu(viz, node, menu):
    """Append a 'Show NDN PIT' entry to a node's context menu."""
    def _show_ndn_pit(dummy_menu_item):
        # Opening the window registers it with the visualizer itself.
        ShowNdnPit(viz, node.node_index)

    menu_item = gtk.MenuItem("Show NDN PIT")
    menu_item.connect("activate", _show_ndn_pit)
    menu_item.show()
    menu.add(menu_item)
def register(viz):
    # Hook into the visualizer so our entry appears on every node's menu.
    viz.connect("populate-node-menu", populate_node_menu)
|
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "android_webview/browser/aw_pdf_exporter.h"
#include <memory>
#include <utility>
#include <vector>
#include "android_webview/browser/aw_print_manager.h"
#include "android_webview/browser_jni_headers/AwPdfExporter_jni.h"
#include "base/android/jni_android.h"
#include "base/android/jni_array.h"
#include "base/bind.h"
#include "content/public/browser/browser_thread.h"
#include "printing/print_settings.h"
#include "printing/units.h"
using base::android::JavaParamRef;
using base::android::JavaRef;
using base::android::ScopedJavaLocalRef;
namespace android_webview {
namespace {
// Translates a Java int[] of page numbers into single-page
// printing::PageRange entries appended to |range_vector|.
void JNI_AwPdfExporter_GetPageRanges(JNIEnv* env,
                                     const JavaRef<jintArray>& int_arr,
                                     printing::PageRanges* range_vector) {
  std::vector<int> page_numbers;
  base::android::JavaIntArrayToIntVector(env, int_arr, &page_numbers);
  for (const int page_number : page_numbers) {
    printing::PageRange single_page;
    single_page.from = page_number;
    single_page.to = page_number;
    range_vector->push_back(single_page);
  }
}
} // namespace
// Binds this native exporter to its Java peer by handing the Java side a
// pointer to |this|; the pointer is cleared again in the destructor.
AwPdfExporter::AwPdfExporter(JNIEnv* env,
                             const JavaRef<jobject>& obj,
                             content::WebContents* web_contents)
    : java_ref_(env, obj), web_contents_(web_contents) {
  DCHECK(!obj.is_null());
  Java_AwPdfExporter_setNativeAwPdfExporter(env, obj,
                                            reinterpret_cast<intptr_t>(this));
}
// Detaches from the Java peer; a no-op when the peer is already gone.
AwPdfExporter::~AwPdfExporter() {
  JNIEnv* env = base::android::AttachCurrentThread();
  ScopedJavaLocalRef<jobject> obj = java_ref_.get(env);
  if (obj.is_null())
    return;
  // Clear the Java peer's weak pointer to |this| object.
  Java_AwPdfExporter_setNativeAwPdfExporter(env, obj, 0);
}
// Exports the web contents as a PDF into file descriptor |fd|. |pages|
// holds the individual page numbers to print; completion (or immediate
// failure, reported as page count 0) is delivered asynchronously through
// DidExportPdf. NOTE(review): |cancel_signal| is accepted but unused in
// this native layer — confirm whether cancellation is handled Java-side.
void AwPdfExporter::ExportToPdf(JNIEnv* env,
                                const JavaParamRef<jobject>& obj,
                                int fd,
                                const JavaParamRef<jintArray>& pages,
                                const JavaParamRef<jobject>& cancel_signal) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::UI);
  printing::PageRanges page_ranges;
  JNI_AwPdfExporter_GetPageRanges(env, pages, &page_ranges);
  // The print manager is attached to |web_contents_| and invokes
  // DidExportPdf when the job finishes.
  AwPrintManager* print_manager = AwPrintManager::CreateForWebContents(
      web_contents_, CreatePdfSettings(env, obj, page_ranges), fd,
      base::BindRepeating(&AwPdfExporter::DidExportPdf,
                          base::Unretained(this)));
  if (!print_manager->PrintNow())
    DidExportPdf(0);
}
namespace {
// Converts a length in mils (1/1000 of an inch) to device dots at |dpi|.
int MilsToDots(int val, int dpi) {
  const double dots = printing::ConvertUnitDouble(val, 1000.0, dpi);
  return static_cast<int>(dots);
}
} // namespace
// Builds printing::PrintSettings from the Java-side exporter: DPI, page
// size and margins are queried over JNI (sizes arrive in mils, i.e.
// 1/1000 inch) and converted to device units before being applied.
std::unique_ptr<printing::PrintSettings> AwPdfExporter::CreatePdfSettings(
    JNIEnv* env,
    const JavaRef<jobject>& obj,
    const printing::PageRanges& page_ranges) {
  auto settings = std::make_unique<printing::PrintSettings>();
  int dpi = Java_AwPdfExporter_getDpi(env, obj);
  int width = Java_AwPdfExporter_getPageWidth(env, obj);
  int height = Java_AwPdfExporter_getPageHeight(env, obj);
  gfx::Size physical_size_device_units;
  int width_in_dots = MilsToDots(width, dpi);
  int height_in_dots = MilsToDots(height, dpi);
  physical_size_device_units.SetSize(width_in_dots, height_in_dots);

  gfx::Rect printable_area_device_units;
  // Assume full page is printable for now.
  printable_area_device_units.SetRect(0, 0, width_in_dots, height_in_dots);

  // An empty range set means the default (no explicit page selection).
  if (!page_ranges.empty())
    settings->set_ranges(page_ranges);

  settings->set_dpi(dpi);
  // TODO(sgurun) verify that the value for newly added parameter for
  // (i.e. landscape_needs_flip) is correct.
  settings->SetPrinterPrintableArea(physical_size_device_units,
                                    printable_area_device_units, true);

  // Margins are fetched per-edge from Java, also in mils.
  printing::PageMargins margins;
  margins.left = MilsToDots(Java_AwPdfExporter_getLeftMargin(env, obj), dpi);
  margins.right = MilsToDots(Java_AwPdfExporter_getRightMargin(env, obj), dpi);
  margins.top = MilsToDots(Java_AwPdfExporter_getTopMargin(env, obj), dpi);
  margins.bottom =
      MilsToDots(Java_AwPdfExporter_getBottomMargin(env, obj), dpi);
  settings->SetCustomMargins(margins);
  settings->set_should_print_backgrounds(true);
  return settings;
}
// Reports the export result back to Java; |page_count| is 0 on failure
// (see ExportToPdf). A no-op when the Java peer has already gone away.
void AwPdfExporter::DidExportPdf(int page_count) {
  JNIEnv* env = base::android::AttachCurrentThread();
  ScopedJavaLocalRef<jobject> obj = java_ref_.get(env);
  if (obj.is_null())
    return;
  Java_AwPdfExporter_didExportPdf(env, obj, page_count);
}
} // namespace android_webview
|
using System;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Controls.Primitives;
using System.Windows.Media.Animation;
using Time_Table_Arranging_Program.Class;
using Time_Table_Arranging_Program.Class.Converter;
using Time_Table_Arranging_Program.UserInterface;
namespace Time_Table_Arranging_Program {
/// <summary>
/// Interaction logic for RuleSetter.xaml
/// </summary>
    /// <summary>
    /// Contract for a UI element that produces a slot-filtering rule.
    /// </summary>
    public interface IRuleSetter {
        /// <summary>Mirrors the rule's toggle-button checked state.</summary>
        bool IsChecked { get; }

        /// <summary>
        /// Builds the predicate for the currently selected rule,
        /// or null when no known rule type is selected.
        /// </summary>
        Predicate<Slot> GetRulePredicate();

        /// <summary>Raised when the rule's close (x) button is clicked.</summary>
        event EventHandler xButton_Clicked;
    }
/// <summary>
/// Code-behind for one rule row (RuleSetter.xaml): the user picks a day, a
/// criterion (All day / Before / After / Between) and up to two times, and the
/// control produces a matching Predicate&lt;Slot&gt;.
/// </summary>
public partial class RuleSetter : UserControl, IRuleSetter {
    private string _day;                    // day name captured when the predicate is built
    private ITime _time1;                   // first chosen time (Before/After/Between)
    private ITime _time2;                   // second chosen time (Between only)
    private double _widthOfTimeChooser;     // natural width, measured on load
    private double _widthOfToLabel;         // natural width, measured on load

    public RuleSetter() {
        InitializeComponent();
    }

    /// <summary>
    /// Snapshots the current day/criterion/time selections and returns the
    /// corresponding predicate, or null when no known criterion is selected.
    /// </summary>
    public Predicate<Slot> GetRulePredicate() {
        _day = (DayCombobox.SelectedItem as ComboBoxItem).Content as string;
        var x = (PredicateCombobox.SelectedItem as ComboBoxItem).Content as string;
        _time1 = TimeChooser1?.GetChosenTime();
        _time2 = TimeChooser2?.GetChosenTime();
        switch (x) {
            case "All day": return AllDayPredicate;
            case "Before": return BeforePredicate;
            case "After": return AfterPredicate;
            case "Between": return BetweenPredicate;
            default: return null;
        }
    }

    public event EventHandler xButton_Clicked;

    // NOTE(review): IsChecked.Value throws if the toggle is in the
    // indeterminate (null) state — presumably the XAML forbids tri-state;
    // confirm against RuleSetter.xaml.
    public bool IsChecked {
        get { return ToggleButton.IsChecked.Value; }
    }

    // Shows/hides the time choosers to match the selected criterion.
    private void PredicateCombobox_OnSelectionChanged(object sender, SelectionChangedEventArgs e) {
        var selectedText = (PredicateCombobox.SelectedItem as ComboBoxItem).Content as string;
        if (selectedText == "All day") {
            ToggleView(TimeChooser1, false);
            ToggleView(ToLabel, false);
            ToggleView(TimeChooser2, false);
        }
        else if (selectedText == "Before" || selectedText == "After") {
            ToggleView(TimeChooser1, true);
            ToggleView(ToLabel, false);
            ToggleView(TimeChooser2, false);
        }
        else if (selectedText == "Between") {
            ToggleView(TimeChooser1, true);
            ToggleView(ToLabel, true);
            ToggleView(TimeChooser2, true);
        }
    }

    // Animates a control's width between 0 and its measured natural width.
    // Early-outs when the control is already in the requested state.
    private void ToggleView(Control ui, bool isShow) {
        DoubleAnimation animation;
        if (isShow) {
            if (ui.ActualWidth != 0) return;
            animation = CustomAnimation.GetEnteringScreenAnimation(0,
                ui is Label ? _widthOfToLabel : _widthOfTimeChooser, false);
            ui.BeginAnimation(WidthProperty, animation);
        }
        else {
            if (ui.ActualWidth == 0) return;
            animation =
                CustomAnimation.GetLeavingScreenAnimation(ui is Label ? _widthOfToLabel : _widthOfTimeChooser, 0,
                    false);
            ui.BeginAnimation(WidthProperty, animation);
        }
    }

    private bool AllDayPredicate(Slot s) {
        var d = Day.Parse(_day);
        return s.Day.Equals(d);
        //return s.Day == _day;
    }

    private bool BeforePredicate(Slot s) {
        return s.StartTime.LessThan(_time1) && AllDayPredicate(s);
    }

    private bool AfterPredicate(Slot s) {
        return s.EndTime.MoreThan(_time1) && AllDayPredicate(s);
    }

    private bool BetweenPredicate(Slot s) {
        var timePeriod = new TimePeriod(_time1, _time2);
        return
            s.TimePeriod.IntersectWith(timePeriod)
            &&
            AllDayPredicate(s);
    }

    private void XButton_OnClick(object sender, RoutedEventArgs e) {
        // BUGFIX: invoking the event unconditionally threw a
        // NullReferenceException when no handler was subscribed.
        xButton_Clicked?.Invoke(this, null);
    }

    // Measure natural widths once the template is applied, then collapse the
    // time controls (the default criterion needs none of them).
    private void RuleSetter_OnLoaded(object sender, RoutedEventArgs e) {
        _widthOfTimeChooser = TimeChooser1.ActualWidth;
        _widthOfToLabel = ToLabel.ActualWidth;
        TimeChooser1.Width = 0;
        TimeChooser2.Width = 0;
        ToLabel.Width = 0;
    }

    // Enable/disable the editing panel to mirror the toggle state.
    private void ToggleButton_OnClick(object sender, RoutedEventArgs e) {
        var b = sender as ToggleButton;
        StackPanel.IsEnabled = b.IsChecked.Value;
    }
}
}
|
package liquibase.change.core;
import liquibase.change.StandardChangeTest;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.Test;
/**
* Tests for {@link RawSQLChange}
*/
public abstract class RawSQLChangeTest extends StandardChangeTest {

    /** Fresh change instance created before every test. */
    private RawSQLChange sqlChange;

    @Before
    public void setUp() throws Exception {
        sqlChange = new RawSQLChange();
    }

    @Override
    @Test
    public void getRefactoringName() throws Exception {
        final RawSQLChange freshChange = new RawSQLChange();
        assertEquals("Custom SQL", freshChange.getChangeMetaData().getName());
    }

    // @Test
    // public void generateStatement() throws Exception {
    //     sqlChange.setSql("SQL STATEMENT HERE");
    //     OracleDatabase database = new OracleDatabase();
    //     assertEquals("SQL STATEMENT HERE", sqlChange.generateStatements(database)[0].getSqlStatement(database));
    // }

    @Override
    @Test
    public void getConfirmationMessage() throws Exception {
        assertEquals("Custom SQL executed", sqlChange.getConfirmationMessage());
    }
}
|
//// [interfacePropertiesWithSameName3.ts]
// NOTE(review): compiler-baseline fixture — D/E (and classes D2/E2) declare
// member 'a' with incompatible types, so extending both (F, F2) is expected
// to be a compile error; the emitted JS below reflects only the classes.
interface D { a: number; }
interface E { a: string; }
interface F extends E, D { } // error
class D2 { a: number; }
class E2 { a: string; }
interface F2 extends E2, D2 { } // error
//// [interfacePropertiesWithSameName3.js]
var D2 = (function () {
    function D2() {
    }
    return D2;
})();
var E2 = (function () {
    function E2() {
    }
    return E2;
})();
|
var assert = require("assert");
var help = require("../help");
// Exercises the TCP transport: every fixture PDU from help.tests() is framed,
// unwrapped, and its decoded fields compared against the fixture's expectations.
describe("transport tcp", function () {
    var tcpTransport = new help.modbus.transports.tcp(help.stream());

    help.tests().forEach(function (test) {
        var frame = Buffer.concat([
            help.tcp_header(test.pdu, test.transactionId, test.protocol, test.unitId),
            test.pdu
        ]);
        var unwrapped = tcpTransport.unwrap(frame);

        describe(test.name + " " + help.print_buffer(frame), function () {
            if (test.pass === false) {
                it("not valid", function () {
                    assert(unwrapped === false);
                });
                return;
            }
            it("valid", function () {
                assert(unwrapped !== false);
            });
            it("transactionId = " + test.transactionId, function () {
                assert(unwrapped.transactionId === test.transactionId);
            });
            it("protocol = " + test.protocol, function () {
                assert(unwrapped.protocol === test.protocol);
            });
            it("unitId = " + test.unitId, function () {
                assert(unwrapped.unitId === test.unitId);
            });
            it("pdu = " + help.print_buffer(test.pdu), function () {
                assert(unwrapped.pdu.length === test.pdu.length);
                help.buffer.values(unwrapped.pdu).forEach(function (_, i) {
                    assert(unwrapped.pdu[i] === test.pdu[i]);
                });
            });
        });
    });

    it("should receive options from constructor helper", function (done) {
        var serverPort = 65123;
        var protocolId = 123;
        help.modbus.tcp.server({ protocol: protocolId }, function (stream) {
            assert(stream.transport.protocol === protocolId);
            return done();
        }).listen(serverPort, function () {
            help.modbus.tcp.connect(serverPort, function (err, socket) {
                socket.close();
            });
        });
    });
});
|
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Transport adapter for http.client, for internal use only."""
import logging
import socket
import six
from six.moves import http_client
from six.moves import urllib
from google.auth import exceptions
from google.auth import transport
_LOGGER = logging.getLogger(__name__)
class Response(transport.Response):
    """http.client transport response adapter.

    Args:
        response (http.client.HTTPResponse): The raw http client response.
    """

    def __init__(self, response):
        self._status = response.status
        # Header names are lower-cased so lookups are case-insensitive.
        self._headers = {key.lower(): value for key, value in response.getheaders()}
        # Read the body eagerly so the caller does not need to keep the
        # underlying connection open to consume it.
        self._data = response.read()

    @property
    def status(self):
        """int: The HTTP status code."""
        return self._status

    @property
    def headers(self):
        """Mapping[str, str]: The response headers, keyed by lower-cased name."""
        return self._headers

    @property
    def data(self):
        """bytes: The raw response body."""
        return self._data
class Request(transport.Request):
    """http.client transport request adapter."""

    def __call__(
        self, url, method="GET", body=None, headers=None, timeout=None, **kwargs
    ):
        """Make an HTTP request using http.client.

        Args:
            url (str): The URI to be requested.
            method (str): The HTTP method to use for the request. Defaults
                to 'GET'.
            body (bytes): The payload / body in HTTP request.
            headers (Mapping): Request headers.
            timeout (Optional(int)): The number of seconds to wait for a
                response from the server. If not specified or if None, the
                socket global default timeout will be used.
            kwargs: Additional arguments passed through to the underlying
                :meth:`~http.client.HTTPConnection.request` method.

        Returns:
            Response: The HTTP response.

        Raises:
            google.auth.exceptions.TransportError: If any exception occurred.
        """
        # socket._GLOBAL_DEFAULT_TIMEOUT is the default in http.client.
        if timeout is None:
            timeout = socket._GLOBAL_DEFAULT_TIMEOUT

        # http.client doesn't allow None as the headers argument.
        if headers is None:
            headers = {}

        # http.client needs the host and path parts specified separately.
        parts = urllib.parse.urlsplit(url)
        path = urllib.parse.urlunsplit(
            ("", "", parts.path, parts.query, parts.fragment)
        )

        if parts.scheme != "http":
            raise exceptions.TransportError(
                # BUGFIX: the two literals previously joined with no space,
                # producing e.g. "...the http scheme, httpswas specified".
                "http.client transport only supports the http scheme, {} "
                "was specified".format(parts.scheme)
            )

        connection = http_client.HTTPConnection(parts.netloc, timeout=timeout)

        try:
            _LOGGER.debug("Making request: %s %s", method, url)
            connection.request(method, path, body=body, headers=headers, **kwargs)
            response = connection.getresponse()
            return Response(response)
        except (http_client.HTTPException, socket.error) as caught_exc:
            new_exc = exceptions.TransportError(caught_exc)
            six.raise_from(new_exc, caught_exc)
        finally:
            # Always release the connection, even when an exception is raised.
            connection.close()
|
<?php
declare(strict_types=1);
namespace Shlinkio\Shlink\Core\Tag\Spec;
use Happyr\DoctrineSpecification\Spec;
use Happyr\DoctrineSpecification\Specification\BaseSpecification;
use Happyr\DoctrineSpecification\Specification\Specification;
/**
 * Specification counting how many tags carry an exact name.
 */
class CountTagsWithName extends BaseSpecification
{
    /** @var string */
    private string $tagName;

    public function __construct(string $tagName)
    {
        $this->tagName = $tagName;
        parent::__construct();
    }

    protected function getSpec(): Specification
    {
        $matchesName = Spec::andX(
            Spec::select('id'),
            Spec::eq('name', $this->tagName),
        );

        return Spec::countOf($matchesName);
    }
}
|
import sys, math, audiodev
# Timing units: number of one-cycle sine-wave repetitions per element.
DOT = 30
DAH = 80
OCTAVE = 2          # 1 == 441 Hz, 2 == 882 Hz, ...
SAMPWIDTH = 2       # bytes per sample
FRAMERATE = 44100
BASEFREQ = 441
QSIZE = 20000       # buffered-device flush threshold, in bytes

# Character -> International Morse code; letters listed in both cases.
morsetab = {
    'A': '.-',              'a': '.-',
    'B': '-...',            'b': '-...',
    'C': '-.-.',            'c': '-.-.',
    'D': '-..',             'd': '-..',
    'E': '.',               'e': '.',
    'F': '..-.',            'f': '..-.',
    'G': '--.',             'g': '--.',
    'H': '....',            'h': '....',
    'I': '..',              'i': '..',
    'J': '.---',            'j': '.---',
    'K': '-.-',             'k': '-.-',
    'L': '.-..',            'l': '.-..',
    'M': '--',              'm': '--',
    'N': '-.',              'n': '-.',
    'O': '---',             'o': '---',
    'P': '.--.',            'p': '.--.',
    'Q': '--.-',            'q': '--.-',
    'R': '.-.',             'r': '.-.',
    'S': '...',             's': '...',
    'T': '-',               't': '-',
    'U': '..-',             'u': '..-',
    'V': '...-',            'v': '...-',
    'W': '.--',             'w': '.--',
    'X': '-..-',            'x': '-..-',
    'Y': '-.--',            'y': '-.--',
    'Z': '--..',            'z': '--..',
    '0': '-----',
    '1': '.----',
    '2': '..---',
    '3': '...--',
    '4': '....-',
    '5': '.....',
    '6': '-....',
    '7': '--...',
    '8': '---..',
    '9': '----.',
    ',': '--..--',
    '.': '.-.-.-',
    '?': '..--..',
    ';': '-.-.-.',
    ':': '---...',
    "'": '.----.',
    '-': '-....-',
    '/': '-..-.',
    '(': '-.--.',   # BUGFIX: was '-.--.-' (same as ')'); ITU code for '(' is '-.--.'
    ')': '-.--.-',
    '_': '..--.-',
    ' ': ' '
}
# If we play at 44.1 kHz (which we do), then if we produce one sine
# wave in 100 samples, we get a tone of 441 Hz. If we produce two
# sine waves in these 100 samples, we get a tone of 882 Hz. 882 Hz
# appears to be a nice one for playing morse code.
def mkwave(octave):
    """Rebuild the module-level `sinewave` and `nowave` sample buffers for
    the given octave multiplier (1 == 441 Hz, 2 == 882 Hz, ...)."""
    global sinewave, nowave
    cycle_frames = int(FRAMERATE / BASEFREQ)
    chunks = []
    for frame in range(cycle_frames):
        amp = int(math.sin(2 * math.pi * frame * octave / cycle_frames) * 0x7fff)
        packed = chr((amp >> 8) & 255) + chr(amp & 255)
        chunks.append(packed[:SAMPWIDTH])
    sinewave = ''.join(chunks)
    nowave = '\0' * (cycle_frames * SAMPWIDTH)

mkwave(OCTAVE)
class BufferedAudioDev:
def __init__(self, *args):
import audiodev
self._base = apply(audiodev.AudioDev, args)
self._buffer = []
self._filled = 0
self._addmethods(self._base, self._base.__class__)
def _addmethods(self, inst, cls):
for name in cls.__dict__.keys():
if not hasattr(self, name):
try:
setattr(self, name, getattr(inst, name))
except:
pass
for basecls in cls.__bases__:
self._addmethods(self, inst, basecls)
def writeframesraw(self, frames):
self._buffer.append(frames)
self._filled = self._filled + len(frames)
if self._filled >= QSIZE:
self.flush()
def wait(self):
self.flush()
self._base.wait()
def flush(self):
print 'flush: %d blocks, %d bytes' % (len(self._buffer), self._filled)
if self._buffer:
import string
self._base.writeframes(string.joinfields(self._buffer, ''))
self._buffer = []
self._filled = 0
def main(args = sys.argv[1:]):
    """Command-line entry point.

    Options:
        -o outfile  write an AIFF file instead of playing to the audio device
        -p octave   pitch multiplier passed to mkwave()

    Remaining args are the text to send; with no args, lines are read from
    stdin until EOF.
    """
    import getopt, string
    try:
        opts, args = getopt.getopt(args, 'o:p:')
    except getopt.error:
        sys.stderr.write('Usage ' + sys.argv[0] +
                ' [ -o outfile ] [ args ] ...\n')
        sys.exit(1)
    dev = None
    for o, a in opts:
        if o == '-o':
            # File output: an aifc writer quacks enough like the audio
            # device (writeframesraw/close) for play() to use it.
            import aifc
            dev = aifc.open(a, 'w')
            dev.setframerate(FRAMERATE)
            dev.setsampwidth(SAMPWIDTH)
            dev.setnchannels(1)
        if o == '-p':
            mkwave(string.atoi(a))
    if not dev:
        # No output file requested: play through the buffered audio device.
        dev = BufferedAudioDev()
        dev.setoutrate(FRAMERATE)
        dev.setsampwidth(SAMPWIDTH)
        dev.setnchannels(1)
        dev.close = dev.stop
    if args:
        line = string.join(args)
    else:
        line = sys.stdin.readline()
    while line:
        print line
        mline = morse(line)
        print mline
        play(mline, dev)
        if hasattr(dev, 'wait'):
            dev.wait()
        # In stdin mode keep reading lines; in argv mode play once and stop.
        if not args:
            line = sys.stdin.readline()
        else:
            line = ''
    dev.close()
# Convert a string to morse code with \001 between the characters in
# the string.
def morse(line):
    """Encode `line` as morse code, with '\\001' appended after each
    character's code; characters without a mapping are skipped."""
    encoded = []
    for ch in line:
        code = morsetab.get(ch)
        if code is not None:
            encoded.append(code + '\001')
    return ''.join(encoded)
# Play a line of morse code.
def play(line, dev):
    """Send an encoded line to `dev`: a tone for each '.'/'-', silence for
    anything else, with a DOT-length gap after every symbol."""
    tone_lengths = {'.': DOT, '-': DAH}
    for symbol in line:
        length = tone_lengths.get(symbol)
        if length is None:
            pause(dev, DAH)
        else:
            sine(dev, length)
        pause(dev, DOT)
def sine(dev, length):
    # Emit `length` repetitions of one sine-wave cycle (an audible tone).
    dev.writeframesraw(sinewave*length)

def pause(dev, length):
    # Emit `length` cycles of silence.
    dev.writeframesraw(nowave*length)

# NOTE(review): the sys.argv[0] comparison looks like a legacy alternate
# entry-point check — confirm before removing.
if __name__ == '__main__' or sys.argv[0] == __name__:
    main()
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds the nullable CharField
    ``Page.extractions_file`` (max_length=200) to ``webapp_page``.

    NOTE: the ``models`` dict below is South's auto-generated frozen ORM
    snapshot — do not hand-edit it.
    """

    def forwards(self, orm):
        # Adding field 'Page.extractions_file'
        db.add_column(u'webapp_page', 'extractions_file',
                      self.gf('django.db.models.fields.CharField')(max_length=200, null=True),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'Page.extractions_file'
        db.delete_column(u'webapp_page', 'extractions_file')

    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'webapp.page': {
            'Meta': {'object_name': 'Page'},
            'extractions_file': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
            'url': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'website': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'pages'", 'to': u"orm['webapp.Website']"})
        },
        u'webapp.website': {
            'Meta': {'object_name': 'Website'},
            'embed_src': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'unique': 'True', 'null': 'True'}),
            'url': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'websites'", 'to': u"orm['auth.User']"})
        }
    }

    complete_apps = ['webapp']
|
#!/usr/bin/env python
import os
import sys
import math
from PIL import Image
def _paste_quadrant(tile, path, size, position):
    """Paste the tile image at `path` into `tile` at `position`, downscaled
    to `size`.  Missing tiles (map edges) are skipped silently, preserving
    the original best-effort behaviour."""
    try:
        partim = Image.open(path)
        tile.paste(partim.resize(size, Image.ANTIALIAS), position)
    except Exception:
        # Best-effort: a missing/unreadable quadrant just stays transparent.
        pass

def create_tiles(image_path, base_path, verbose):
    """Slice `image_path` into a {zoom}/{x}/{y}.png slippy-map tile pyramid
    under `base_path`.

    Args:
        image_path: path to any image PIL can open.
        base_path:  root output directory for the tile tree.
        verbose:    int-convertible flag; truthy prints progress to stdout.
    """
    # parameters
    tile_size = (256, 256)
    verbose = int(verbose)

    # read image file and get dimensions
    im = Image.open(image_path).convert("RGBA")
    (image_width, image_height) = im.size
    tile_width, tile_height = tile_size
    cols = int(math.ceil(float(image_width)/float(tile_width)))
    rows = int(math.ceil(float(image_height)/float(tile_height)))
    # deepest zoom level whose 2^z x 2^z grid covers the tile grid
    max_zoom = int(max(math.ceil(math.log(cols, 2)), math.ceil(math.log(rows, 2))))
    # BUGFIX: half-tile sizes/offsets below use // so they stay ints under
    # Python 3 (Image.resize/paste require integer sizes and coordinates).
    half_width = tile_width // 2
    half_height = tile_height // 2

    if verbose:
        sys.stdout.write("Got image of size %i x %i, creating %i x %i tiles of size %i x %i.\n" % (image_width, image_height, cols, rows, tile_width, tile_height))
    if verbose:
        sys.stdout.write("Creating basic tiles...\n")

    # Cut the full-resolution image into tiles at the deepest zoom level.
    for x in range(0, cols):
        for y in range(0, rows):
            left = x * tile_width
            right = left + tile_width
            upper = y * tile_height
            lower = upper + tile_height
            filename = base_path + '/' + str(max_zoom) + '/' + str(x) + "/" + str(y) + ".png"
            if not os.path.exists(os.path.dirname(filename)):
                os.makedirs(os.path.dirname(filename))
            im.crop((left, upper, right, lower)).save(filename)
            if verbose:
                sys.stdout.write("\r%i / %i" % (x * rows + y + 1, rows * cols))
                sys.stdout.flush()
    if verbose:
        sys.stdout.write("\n")
    del im

    if verbose:
        sys.stdout.write("Creating overview tiles...\n")
    # Each coarser zoom level halves the grid; every tile is built from (up
    # to) four tiles of the previous level, each scaled to a quarter.
    for z in range(max_zoom - 1, -1, -1):
        cols = int(math.ceil(cols / 2.0))
        rows = int(math.ceil(rows / 2.0))
        for x in range(0, cols):
            for y in range(0, rows):
                filename = base_path + '/' + str(z) + '/' + str(x) + "/" + str(y) + ".png"
                if not os.path.exists(os.path.dirname(filename)):
                    os.makedirs(os.path.dirname(filename))
                # get (up to) four tiles from previous zoom layer to compose new tile
                tile = Image.new("RGBA", tile_size)
                prev = base_path + '/' + str(z + 1) + '/'
                half = (half_width, half_height)
                _paste_quadrant(tile, prev + str(x * 2) + '/' + str(y * 2) + ".png", half, (0, 0))
                _paste_quadrant(tile, prev + str(x * 2 + 1) + '/' + str(y * 2) + ".png", half, (half_width, 0))
                _paste_quadrant(tile, prev + str(x * 2) + '/' + str(y * 2 + 1) + ".png", half, (0, half_height))
                _paste_quadrant(tile, prev + str(x * 2 + 1) + '/' + str(y * 2 + 1) + ".png", half, (half_width, half_height))
                tile.save(filename)
                if verbose:
                    sys.stdout.write("\r%i / %i" % (x * rows + y + 1, rows * cols))
                    sys.stdout.flush()
        if verbose:
            sys.stdout.write("\n")
    if verbose:
        sys.stdout.write("\n")

if __name__ == "__main__":
    create_tiles(sys.argv[1], sys.argv[2], sys.argv[3])
|
<?php
include 'logincheck.php';
include 'html/header.php';

if (isset($_POST['invite'])) {
    $invite = strtolower($_POST['invite']);
} else {
    echo '<h1>No Invite Code Entered</h1>';
    exit;
}

// SECURITY: $invite comes straight from POST data and was previously
// interpolated into the query unescaped (SQL injection).  It is escaped
// here; the DB connection is presumably opened by logincheck.php.
$safe_invite = mysql_real_escape_string($invite);
$row = mysql_fetch_array(mysql_query("SELECT Count(id) FROM users WHERE Invite = '$safe_invite'"));
$count = $row['Count(id)'];

/* when you remove the invite feature, please update the GotBanners plugin page on Gotchance.com */

// Invite code => maximum number of registrations allowed for that code.
// (Table-driven replacement for the previous 26-case switch; messages and
// behaviour are unchanged.)
$invite_limits = array(
    'brandon'            => 1000,
    'garcya'             => 1000,
    'andaka'             => 300,
    'maelkool'           => 300,
    'maisblogs'          => 300,
    'ollieparsley'       => 300,
    'techjuicer'         => 300,
    'webjuice'           => 300,
    'siteguide'          => 300,
    'makeuseof'          => 2500,
    'webappers'          => 1001,
    'techcrunch'         => 300,
    'mashable'           => 300,
    'centernetworks'     => 300,
    'readwriteweb'       => 300,
    'gotchance'          => 1000,
    'johnchow'           => 200,
    'bloggingexperiment' => 200,
    'johntp'             => 1000,
    'shoemoney'          => 200,
    'problogger'         => 1000,
    'netbusinessblog'    => 200,
    'carlocab'           => 250,
    'techfold'           => 250,
    'jeffro2pt0'         => 200,
    'darin'              => 250,
);

if (!isset($invite_limits[$invite])) {
    echo '<h1>Invalid Invite Code</h1>';
    exit;
}
if ($count >= $invite_limits[$invite]) {
    echo '<h1>Invite Code Expired</h1>';
    exit;
}

echo '<script language="javascript" src="html/register.js"></script>';
?>
<h1>Register</h1>
<div id="fullarea">
<form name="regform" id="regform" onsubmit="register('<?php echo $invite; ?>');" >
<table width="100%" border="0" cellpadding="5" cellspacing="0" style="font-size: 18px;">
<tr>
<td align="right">Name </td>
<td><input name="name" type="text" id="name" class="bigbox" /></td>
</tr>
<tr>
<td align="right">Email Address </td>
<td><input name="email" type="text" id="email" class="bigbox" /></td>
</tr>
<tr>
<td align="right">Username </td>
<td><input name="username" type="text" id="username" class="bigbox" /></td>
</tr>
<tr>
<td align="right">Password </td>
<td><input name="password" type="password" id="password" class="bigbox" /></td>
</tr>
<tr>
<td align="right">Confirm Password </td>
<td><input name="confirm" type="password" id="confirm" class="bigbox" /></td>
</tr>
<tr>
<td colspan="2" align="center">
<div id="regarea" style="font-size: 15px; margin-bottom: 10px;"></div>
<input type="button" value="Register" name="reg" onclick="register('<?php echo $invite; ?>');" style="font-size: 18px;" />
</td>
</tr>
</table>
</form>
</div>
<?php
// Registration form markup above posts via register.js (register()).  By this
// point $invite has been validated against the known-code whitelist.
// Render the shared page footer (layout opened by html/header.php).
include 'html/footer.php';
?>
|
# Copyright (c) 2013 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Resource verification business logic.
"""
from barbican.common import utils
LOG = utils.getLogger(__name__)
def verify(verification):
    """Determine whether the verification's target resource is valid.

    The supplied entity is updated in place: ``is_verified`` is set to
    ``True`` only when the target resource type is ``image``; for any other
    resource type the flag is left untouched.

    :param verification: A Verification entity
    """
    resource_is_image = verification.resource_type == 'image'
    if resource_is_image:
        verification.is_verified = True
|
/*jslint browser: true */
(function() {
    "use strict";

    var originalTitle = document.title,
        originalURL = document.location.href.replace(document.location.search, "") + "?name=",
        qShareURL = document.querySelector("section a");

    /**
     * Generate a sharing URL.
     * @param {String} name The person's name.
     */
    function makeShareLink(name) {
        // SECURITY: the link text previously went through innerHTML with the
        // decoded, user-controlled name — a markup/script injection (XSS)
        // vector.  textContent renders the same string inertly.
        qShareURL.textContent = originalURL + decodeURIComponent(name);
        qShareURL.href = originalURL + encodeURIComponent(name);
    }

    /**
     * Update the page title with the person's name.
     * @param {String} name The person's name.
     */
    function updatePageTitle(name) {
        document.title = `${decodeURIComponent(name)} ${originalTitle}`;
    }

    // Initial setup of the share link
    qShareURL.textContent = originalURL;
    qShareURL.href = originalURL;

    // Update everything on keypress
    document.querySelector("header input").addEventListener("input", function() {
        updatePageTitle(this.value);
        makeShareLink(this.value);
    });

    window.onload = function() {
        var qs = window.location.search;

        // No query string was given
        if (!/^\?name=.+?$/.test(qs)) {
            return false;
        }

        // Get just the name and update all displays
        var name = qs.split("=")[1];
        updatePageTitle(name);
        makeShareLink(name);
        document.querySelector("header input").value = decodeURIComponent(name);

        // Test for HTML5 audio compatibility, preferring MP3 audio
        // Taken from http://diveintohtml5.info/everything.html#audio-mp3
        var _a = document.createElement("audio");
        var audioFile = (!!(_a.canPlayType && _a.canPlayType("audio/mpeg;").replace(/no/, ""))) ?
            "audio/congratulations.mp3" : "audio/congratulations.ogg";
        var congrats = new Audio(audioFile);
        congrats.load();
        congrats.play();
    };
}());
|
////////////////////////////////////////////////////////////////
//
// Copyright (C) 2005 Affymetrix, Inc.
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License (version 2) as
// published by the Free Software Foundation.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program;if not, write to the
//
// Free Software Foundation, Inc.,
// 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
//
////////////////////////////////////////////////////////////////
#ifndef _CNReporter_H_
#define _CNReporter_H_
/**
* @file CNReporter.h
*
* @brief This header contains the CNReporter class definition.
*/
#include "copynumber/CNAnalysisMethod.h"
#include "copynumber/CNExperiment.h"
#include "copynumber/CNSegment.h"
//
#include "util/AffxArray.h"
#include "util/AffxConv.h"
#include "util/BaseEngine.h"
#include "util/CalvinToText.h"
#include "util/Err.h"
#include "util/Guid.h"
#include "util/Util.h"
#include "util/Verbose.h"
//
#ifndef _MSC_VER
#include <sys/types.h>
#include <netinet/in.h>
#include <inttypes.h>
#endif
using namespace std;
/// @brief Analysis method that can stream its results into a Calvin DataSet.
/// The writer is handed in by reference via setDataSetWriter() and stored as
/// a pointer, so it is presumably non-owning — confirm with the .cpp.
class CNReporterMethod : public CNAnalysisMethod
{
protected:
// Destination data set writer; set via setDataSetWriter().
affymetrix_calvin_io::DataSetWriter* m_pset;
public:
CNReporterMethod();
void setDataSetWriter(affymetrix_calvin_io::DataSetWriter& set);
// Number of rows this method will contribute to the data set.
virtual int getRowCount();
};
/**
 * @brief A base class for copy number reporters.
 *
 * Concrete reporters implement run(); setup() must be called first to wire
 * the reporter to an engine, an experiment, its probe sets and the analysis
 * methods to extract data from.
 */
class CNReporter
{
protected:
// Collaborators provided via setup(); stored as pointers from references,
// so presumably non-owning — confirm with the .cpp.
BaseEngine* m_pEngine;
CNExperiment* m_pobjExperiment;
CNProbeSetArray* m_pvProbeSets;
AffxArray<CNAnalysisMethod>* m_pvMethods;
AffxString m_strARRFileName;
// Waviness helpers used by derived reporters.
void wavinessSegCounts(int& segCountLoss, int& segCountGain, float& sd);
std::string wavinessAmplitudes();
public:
CNReporter();
virtual ~CNReporter();
// Accessors for the collaborators supplied in setup().
CNExperiment* getExperiment();
CNProbeSetArray* getProbeSets();
AffxArray<CNAnalysisMethod>* getMethods();
virtual void defineOptions(BaseEngine& e);
virtual void checkOptions(BaseEngine& e);
/**
 * @brief Setup the reporter to be run
 * @param BaseEngine& - The engine associated with this reporter
 * @param CNExperiment& - The experiment
 * @param CNProbeSetArray& - The probe set vector associated with the experiment
 * @param AffxArray<CNAnalysisMethod>& - The analysis methods to extract data from
 */
void setup(BaseEngine& engine, CNExperiment& objExperiment, CNProbeSetArray& vProbeSets, AffxArray<CNAnalysisMethod>& vMethods);
/**
 * @brief Is the reporter setup to be run
 */
void isSetup();
// Produce the report; implemented by concrete reporters.
virtual void run() = 0;
AffxString getArrayName();
/**
 * @brief Load a header parameter from individual components
 * @param const AffxString& - The parameter name
 * @param PgOpt::PgOptType - The parameter type
 * @param const AffxString& - The parameter value
 * @param affymetrix_calvin_parameter::ParameterNameValueType& - The parameter to load
 */
void loadParam(const AffxString& strName, PgOpt::PgOptType type, const AffxString& strValue, affymetrix_calvin_parameter::ParameterNameValueType& param);
// Helpers that append a value to a raw byte buffer at iIndex, advancing it.
static void loadBuffer(char* pBuffer, int& iIndex, AffxString& str, int iLength = -1);
static void loadBuffer(char* pBuffer, int& iIndex, unsigned char uc);
static void loadBuffer(char* pBuffer, int& iIndex, char c);
static void loadBuffer(char* pBuffer, int& iIndex, unsigned int ui);
static void loadBuffer(char* pBuffer, int& iIndex, float f);
static AffxString prepareAscii(const AffxString& str, int iLength);
};
// Orders (index, amplitude) pairs by ascending index.
// BUGFIX: previously inherited std::binary_function, which was deprecated in
// C++11 and removed in C++17; the member typedefs it provided are kept
// explicitly for any legacy code that referenced them.
struct wavSortCriterion
{
    typedef std::pair<int, float> first_argument_type;
    typedef std::pair<int, float> second_argument_type;
    typedef bool result_type;

    bool operator()(const std::pair<int, float>& lhs, const std::pair<int, float>& rhs) const {
        return lhs.first < rhs.first;
    }
};
#endif
|
/**
 * @desc Return first item of a list (empty string when the list is missing
 * or empty), wrapped as a Handlebars SafeString.
 */
Handlebars.registerHelper('first', function(context, options) {
    var ret = '';
    // ROBUSTNESS: previously threw a TypeError when the template passed a
    // null/undefined context; now falls back to the empty string.
    if (context && context.length)
        ret = context[0];
    return new Handlebars.SafeString(ret);
});
|
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.pinpoint.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.pinpoint.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* ImportJobResponse JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ImportJobResponseJsonUnmarshaller implements Unmarshaller<ImportJobResponse, JsonUnmarshallerContext> {
    /**
     * Unmarshalls an {@link ImportJobResponse} from the JSON stream held by
     * the given context.
     *
     * @param context streaming parse state positioned at the object's value.
     * @return the populated response, or null when the value is JSON null.
     * @throws Exception propagated from the underlying JSON parser.
     */
    public ImportJobResponse unmarshall(JsonUnmarshallerContext context) throws Exception {
        ImportJobResponse importJobResponse = new ImportJobResponse();
        // Remember the depth where this object begins; parsing stops once
        // the matching END_OBJECT at (or above) this depth is consumed.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;
        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        if (token == VALUE_NULL) {
            // A JSON null for the whole object maps to a null response.
            return null;
        }
        while (true) {
            if (token == null)
                break;
            if (token == FIELD_NAME || token == START_OBJECT) {
                // Dispatch on field name; fields not listed here are skipped.
                if (context.testExpression("ApplicationId", targetDepth)) {
                    context.nextToken();
                    importJobResponse.setApplicationId(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("CompletedPieces", targetDepth)) {
                    context.nextToken();
                    importJobResponse.setCompletedPieces(context.getUnmarshaller(Integer.class).unmarshall(context));
                }
                if (context.testExpression("CompletionDate", targetDepth)) {
                    context.nextToken();
                    importJobResponse.setCompletionDate(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("CreationDate", targetDepth)) {
                    context.nextToken();
                    importJobResponse.setCreationDate(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("Definition", targetDepth)) {
                    context.nextToken();
                    importJobResponse.setDefinition(ImportJobResourceJsonUnmarshaller.getInstance().unmarshall(context));
                }
                if (context.testExpression("FailedPieces", targetDepth)) {
                    context.nextToken();
                    importJobResponse.setFailedPieces(context.getUnmarshaller(Integer.class).unmarshall(context));
                }
                if (context.testExpression("Failures", targetDepth)) {
                    context.nextToken();
                    importJobResponse.setFailures(new ListUnmarshaller<String>(context.getUnmarshaller(String.class)).unmarshall(context));
                }
                if (context.testExpression("Id", targetDepth)) {
                    context.nextToken();
                    importJobResponse.setId(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("JobStatus", targetDepth)) {
                    context.nextToken();
                    importJobResponse.setJobStatus(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("TotalFailures", targetDepth)) {
                    context.nextToken();
                    importJobResponse.setTotalFailures(context.getUnmarshaller(Integer.class).unmarshall(context));
                }
                if (context.testExpression("TotalPieces", targetDepth)) {
                    context.nextToken();
                    importJobResponse.setTotalPieces(context.getUnmarshaller(Integer.class).unmarshall(context));
                }
                if (context.testExpression("TotalProcessed", targetDepth)) {
                    context.nextToken();
                    importJobResponse.setTotalProcessed(context.getUnmarshaller(Integer.class).unmarshall(context));
                }
                if (context.testExpression("Type", targetDepth)) {
                    context.nextToken();
                    importJobResponse.setType(context.getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // End of a nested structure: stop only when we have closed
                // the object we started in (depth back at the original).
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }
        return importJobResponse;
    }
    // Lazily-created shared instance; unmarshallers hold no per-call state.
    private static ImportJobResponseJsonUnmarshaller instance;
    public static ImportJobResponseJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new ImportJobResponseJsonUnmarshaller();
        return instance;
    }
}
|
// Meteor package manifest for the tinytest-harness package.
Package.describe({
  name: 'tinytest-harness',
  version: '0.0.2',
  summary: 'In development, lets your app define Tinytests, run them and see results',
  documentation: null
});
Package.onUse(function(api) {
  // XXX this needs to change. We'll probably have `testOnly` a la
  // `debugOnly`.
  // Only wire up the test packages when running under `meteor test`;
  // global.testCommandMetadata is set by the test command.
  if (global.testCommandMetadata) {
    api.imply('tinytest');
    api.imply('test-helpers');
    api.imply('test-in-browser');
    api.use('test-in-browser');
    api.export('runTests');
  }
});
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BrockAllen.OAuth2
{
    // Shared constant values for the OAuth2 library.
    public class Constants
    {
        public class ClaimTypes
        {
            // Claim type recording which external identity provider
            // authenticated the user (ACS-compatible claim URI).
            public const string IdentityProvider = "http://schemas.microsoft.com/accesscontrolservice/2010/07/claims/identityprovider";
        }
    }
}
|
#include <stdio.h>
#include <stdlib.h>
/*
 * qsort comparator for ints.
 * Fix: the original returned *x - *y, which overflows (undefined
 * behavior) when the operands are far apart, e.g. INT_MIN vs INT_MAX.
 * Comparing and subtracting the boolean results is overflow-free and
 * yields the same sign.
 */
int cmp(const void *x, const void *y)
{
    int a = *(const int *) x;
    int b = *(const int *) y;
    return (a > b) - (a < b);
}
/*
 * Reads n integers from INP.TXT, counts the number of distinct values,
 * and writes that count to OUT.TXT.
 * Fixes: fopen/malloc results are checked, the buffer is freed (the
 * original leaked it), and n == 0 now reports 0 distinct values
 * (the original unconditionally started the count at 1).
 * Returns nonzero on I/O or allocation failure.
 */
int main()
{
    FILE *f = fopen("INP.TXT", "r");
    int n = 0, i, *a, r;
    if (f == NULL)
        return 1;                      /* input file missing/unreadable */
    if (fscanf(f, "%d", &n) != 1 || n < 0) {
        fclose(f);
        return 1;                      /* malformed element count */
    }
    a = malloc((size_t) n * sizeof(int));
    if (a == NULL) {
        fclose(f);
        return 1;                      /* out of memory */
    }
    for (i = 0; i < n; i++)
        fscanf(f, "%d", a + i);
    fclose(f);
    /* Sorting groups equal values; each strict increase between
     * neighbors marks one more distinct value. */
    qsort(a, n, sizeof(int), cmp);
    r = (n > 0) ? 1 : 0;
    for (i = 1; i < n; i++)
        if (a[i] > a[i - 1])
            r++;
    free(a);                           /* was leaked in the original */
    f = fopen("OUT.TXT", "w");
    if (f == NULL)
        return 1;
    fprintf(f, "%d\n", r);
    fclose(f);
    return 0;
}
|
import re
_recid_volume = "HA.II.{:02d}"
_recid_single = "HA.II.{:02d}.{:03d}"
_recid_double = "HA.II.{:02d}.{:03d}-{:03d}"
_recpattern = re.compile(
"^HA\.II\. (\d{2}) (?:\.(\d{3}))? (?:-(\d{3}))? $", re.X)
_imgname = "Htw-berlin-stoffmuster-ha02-{:02d}-{:03d}.{}"
_imgname_double = "Htw-berlin-stoffmuster-ha02-{:02d}-{:03d}-{:03d}.{}"
_imgpattern = re.compile(
"^Htw-berlin-stoffmuster-ha02- (\d{2}) -(\d{3}) (?:-(\d{3}))?", re.X)
def split_recid(recid):
    """Split a record id into (volume, first page, second page).

    Either page component is None when absent from the id.
    Raises ValueError when the id does not match the expected pattern.
    """
    m = _recpattern.match(recid)
    if m is None:
        raise ValueError("Invalid record id pattern: {}".format(recid))
    vol, p1, p2 = m.group(1), m.group(2), m.group(3)
    return (int(vol),
            int(p1) if p1 else None,
            int(p2) if p2 else None)
def recid2img(recid, ext="jpg"):
    """Derive the image filename for a page-level record id.

    Raises ValueError for volume-level ids (no page component).
    """
    volid, page1, page2 = split_recid(recid)
    if page1 is None:
        raise ValueError("Invalid record id, page missing: {}".format(recid))
    if page2 is None:
        return _imgname.format(volid, page1, ext)
    return _imgname_double.format(volid, page1, page2, ext)
def img2recids(imgname):
    """Map an image filename to its record ids.

    Returns (volume id, record id, first-page id, second-page id); the
    last two entries are only set for double-page images, else None.
    Raises ValueError when the filename does not match the pattern.
    """
    m = _imgpattern.match(imgname)
    if m is None:
        raise ValueError("Invalid image name pattern: {}".format(imgname))
    volid = int(m.group(1))
    page1 = int(m.group(2))
    page2 = int(m.group(3)) if m.group(3) else None
    volume_id = _recid_volume.format(volid)
    if page2 is None:
        return (volume_id, _recid_single.format(volid, page1), None, None)
    return (volume_id,
            _recid_double.format(volid, page1, page2),
            _recid_single.format(volid, page1),
            _recid_single.format(volid, page2))
|
#include "cepscore.h"
// Default constructor; no members require explicit initialization here.
Cepscore::Cepscore()
{
}
|
using System;
using Server.Engines.Quests;
using Server.Engines.Quests.Hag;
using Server.Mobiles;
using Server.Network;
using Server.Engines.Craft;
namespace Server.Items
{
    // Player-craftable spyglass: shows the moon phases on use and can
    // complete the Witch Apprentice quest's star-chart objective at night.
    [Flipable(0x14F5, 0x14F6)]
    public class Spyglass : Item, IResource
    {
        private CraftResource _Resource;
        private Mobile _Crafter;
        private ItemQuality _Quality;

        // Crafting resource; changing it re-hues the item to match.
        // Fix: the original setter assigned _Resource twice back to back;
        // the redundant duplicate assignment was removed.
        [CommandProperty(AccessLevel.GameMaster)]
        public CraftResource Resource { get { return _Resource; } set { _Resource = value; Hue = CraftResources.GetHue(_Resource); InvalidateProperties(); } }

        // Mobile that crafted this item (set when the maker's mark is applied).
        [CommandProperty(AccessLevel.GameMaster)]
        public Mobile Crafter { get { return _Crafter; } set { _Crafter = value; InvalidateProperties(); } }

        // Craft quality (normal or exceptional).
        [CommandProperty(AccessLevel.GameMaster)]
        public ItemQuality Quality { get { return _Quality; } set { _Quality = value; InvalidateProperties(); } }

        // Spyglasses are always player-made.
        public bool PlayerConstructed { get { return true; } }

        [Constructable]
        public Spyglass()
            : base(0x14F5)
        {
            Weight = 3.0;
        }

        public Spyglass(Serial serial)
            : base(serial)
        {
        }

        public override void OnDoubleClick(Mobile from)
        {
            from.LocalOverheadMessage(MessageType.Regular, 0x3B2, 1008155); // You peer into the heavens, seeking the moons...
            from.Send(new MessageLocalizedAffix(from.NetState, from.Serial, from.Body, MessageType.Regular, 0x3B2, 3, 1008146 + (int)Clock.GetMoonPhase(Map.Trammel, from.X, from.Y), "", AffixType.Prepend, "Trammel : ", ""));
            from.Send(new MessageLocalizedAffix(from.NetState, from.Serial, from.Body, MessageType.Regular, 0x3B2, 3, 1008146 + (int)Clock.GetMoonPhase(Map.Felucca, from.X, from.Y), "", AffixType.Prepend, "Felucca : ", ""));
            PlayerMobile player = from as PlayerMobile;
            if (player != null)
            {
                QuestSystem qs = player.Quest;
                if (qs is WitchApprenticeQuest)
                {
                    FindIngredientObjective obj = qs.FindObjective(typeof(FindIngredientObjective)) as FindIngredientObjective;
                    if (obj != null && !obj.Completed && obj.Ingredient == Ingredient.StarChart)
                    {
                        int hours, minutes;
                        Clock.GetTime(from.Map, from.X, from.Y, out hours, out minutes);
                        // Star charts can only be drawn at night (in-game time).
                        if (hours < 5 || hours > 17)
                        {
                            player.SendLocalizedMessage(1055040); // You gaze up into the glittering night sky. With great care, you compose a chart of the most prominent star patterns.
                            obj.Complete();
                        }
                        else
                        {
                            player.SendLocalizedMessage(1055039); // You gaze up into the sky, but it is not dark enough to see any stars.
                        }
                    }
                }
            }
        }

        public override void GetProperties(ObjectPropertyList list)
        {
            base.GetProperties(list);
            if (_Crafter != null)
            {
                list.Add(1050043, _Crafter.TitleName); // crafted by ~1_NAME~
            }
            if (_Quality == ItemQuality.Exceptional)
            {
                list.Add(1060636); // Exceptional
            }
        }

        public override void AddNameProperty(ObjectPropertyList list)
        {
            // Non-iron items display their resource type in the name.
            if (_Resource > CraftResource.Iron)
            {
                list.Add(1053099, "#{0}\t{1}", CraftResources.GetLocalizationNumber(_Resource), String.Format("#{0}", LabelNumber.ToString())); // ~1_oretype~ ~2_armortype~
            }
            else
            {
                base.AddNameProperty(list);
            }
        }

        // Craft-system callback: records quality, maker's mark and resource.
        public virtual int OnCraft(int quality, bool makersMark, Mobile from, CraftSystem craftSystem, Type typeRes, ITool tool, CraftItem craftItem, int resHue)
        {
            Quality = (ItemQuality)quality;
            if (makersMark)
                Crafter = from;
            if (!craftItem.ForceNonExceptional)
            {
                if (typeRes == null)
                    typeRes = craftItem.Resources.GetAt(0).ItemType;
                Resource = CraftResources.GetFromType(typeRes);
            }
            return quality;
        }

        public override void Serialize(GenericWriter writer)
        {
            base.Serialize(writer);
            writer.Write((int)1); // version
            writer.Write((int)_Resource);
            writer.Write(_Crafter);
            writer.Write((int)_Quality);
        }

        public override void Deserialize(GenericReader reader)
        {
            base.Deserialize(reader);
            int version = reader.ReadInt();
            switch (version)
            {
                case 1:
                    _Resource = (CraftResource)reader.ReadInt();
                    _Crafter = reader.ReadMobile();
                    _Quality = (ItemQuality)reader.ReadInt();
                    break;
                case 0:
                    // Legacy items saved before resource/crafter/quality
                    // were persisted; fields keep their default values.
                    break;
            }
        }
    }
}
|
/**
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import i18n from 'i18next';
import { initReactI18next } from 'react-i18next';
import translationEN from './common/locales/en/translation.json';
import translationES from './common/locales/es/translation.json';
// Configure the shared i18next instance with the bundled English and
// Spanish catalogs; English is both the initial and fallback language.
i18n
  .use(initReactI18next) // passes i18n down to react-i18next
  .init({
    resources: {
      en: {
        translation: translationEN
      },
      es: {
        translation: translationES
      }
    },
    lng: 'en', // initial UI language
    fallbackLng: 'en', // used when a key is missing in the active language
    interpolation: {
      escapeValue: false // React already escapes rendered values
    }
  });
export default i18n;
|
#ifndef BUW_SHAPE_HPP
#define BUW_SHAPE_HPP
#define _USE_MATH_DEFINES
#include <iostream>
#include <string>
#include <glm/glm.hpp>
#include "material.hpp"
#include "ray.hpp"
// Abstract base class for ray-traceable shapes: stores a name and a
// surface material; concrete shapes implement area/volume/intersect.
class Shape {
public:
  Shape();
  // Fix: the destructor is now virtual. Shape is a polymorphic base
  // (it has pure virtual members), so deleting a derived object through
  // a Shape* with a non-virtual destructor is undefined behavior. The
  // original's commented-out "/* virtual */" hinted at this.
  virtual ~Shape();
  Shape(std::string const& name);
  Shape(Material const& material);
  Shape(std::string const& name, Material const& material);
  std::string const& getName() const;
  Material const& getMaterial() const;
  virtual double area() const = 0;
  virtual double volume() const = 0;
  virtual std::ostream& print(std::ostream& os) const;
  virtual bool intersect(Ray const& r, float& t) = 0; // pure virtual
private:
  std::string name_;
  Material material_;
};
std::ostream& operator<<(std::ostream& os, Shape const& s);
#endif
|
"""
Many-to-one relationships
To define a many-to-one relationship, use ``ForeignKey()``.
"""
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Reporter(models.Model):
    # The "one" side of the relation; Article.reporter points here.
    first_name = models.CharField(max_length=30)
    last_name = models.CharField(max_length=30)
    email = models.EmailField()
    def __str__(self):
        # Display as "First Last".
        return "%s %s" % (self.first_name, self.last_name)
@python_2_unicode_compatible
class Article(models.Model):
    headline = models.CharField(max_length=100)
    pub_date = models.DateField()
    # Many articles per reporter; deleted along with their reporter.
    reporter = models.ForeignKey(Reporter, models.CASCADE)
    def __str__(self):
        return self.headline
    class Meta:
        # Default queryset ordering relied on by the related-object tests.
        ordering = ('headline',)
@python_2_unicode_compatible
class City(models.Model):
    # Explicit BigAutoField primary key (instead of the default AutoField).
    id = models.BigAutoField(primary_key=True)
    name = models.CharField(max_length=50)
    def __str__(self):
        return self.name
@python_2_unicode_compatible
class District(models.Model):
    # Many districts per city; FK targets City's big-int primary key.
    city = models.ForeignKey(City, models.CASCADE)
    name = models.CharField(max_length=50)
    def __str__(self):
        return self.name
# If ticket #1578 ever slips back in, these models will not be able to be
# created (the field names being lower-cased versions of their opposite
# classes is important here).
class First(models.Model):
    # Plain integer field named like the opposite model (lower-cased).
    second = models.IntegerField()
class Second(models.Model):
    first = models.ForeignKey(First, models.CASCADE, related_name='the_first')
# Protect against repetition of #1839, #2415 and #2536.
class Third(models.Model):
    name = models.CharField(max_length=20)
    # Self-referential, nullable FK.
    third = models.ForeignKey('self', models.SET_NULL, null=True, related_name='child_set')
class Parent(models.Model):
    name = models.CharField(max_length=20, unique=True)
    # Forward (string) reference to Child; nulled when the child is deleted.
    bestchild = models.ForeignKey('Child', models.SET_NULL, null=True, related_name='favored_by')
class Child(models.Model):
    name = models.CharField(max_length=20)
    parent = models.ForeignKey(Parent, models.CASCADE)
class ToFieldChild(models.Model):
    # FK targeting Parent.name (a non-pk unique column) via to_field.
    parent = models.ForeignKey(Parent, models.CASCADE, to_field='name')
# Multiple paths to the same model (#7110, #7125)
@python_2_unicode_compatible
class Category(models.Model):
    name = models.CharField(max_length=20)
    def __str__(self):
        return self.name
class Record(models.Model):
    category = models.ForeignKey(Category, models.CASCADE)
@python_2_unicode_compatible
class Relation(models.Model):
    # Two FKs into the same model: multiple join paths (#7110, #7125).
    left = models.ForeignKey(Record, models.CASCADE, related_name='left_set')
    right = models.ForeignKey(Record, models.CASCADE, related_name='right_set')
    def __str__(self):
        return "%s - %s" % (self.left.category.name, self.right.category.name)
# Test related objects visibility.
class SchoolManager(models.Manager):
    # Restrictive default manager: only public schools are visible,
    # which exercises related-object visibility through a manager filter.
    def get_queryset(self):
        return super(SchoolManager, self).get_queryset().filter(is_public=True)
class School(models.Model):
    is_public = models.BooleanField(default=False)
    objects = SchoolManager()
class Student(models.Model):
    school = models.ForeignKey(School, models.CASCADE)
|
#!/usr/bin/env python
"""Test tvnamer's filename parser
"""
from helpers import assertEquals
from tvnamer.utils import (FileParser, DatedEpisodeInfo, NoSeasonEpisodeInfo)
from test_files import files
def test_autogen_names():
    """Tests set of standard filename formats with various data
    """
    # NOTE: the string below is a plain expression statement, not part of
    # the docstring (only the first literal above is the docstring).
    """Mostly based on scene naming standards:
    http://tvunderground.org.ru/forum/index.php?showtopic=8488
    %(seriesname)s becomes the seriesname,
    %(seasno)s becomes the season number,
    %(epno)s becomes the episode number.
    Each is string-formatted with seasons from 0 to 10, and ep 0 to 10
    """
    # Filename templates expanded for every (season, episode) pair below.
    name_formats = [
    '%(seriesname)s.s%(seasno)de%(epno)d.dsr.nf.avi', # seriesname.s01e02.dsr.nf.avi
    '%(seriesname)s.S%(seasno)dE%(epno)d.PROPER.dsr.nf.avi', # seriesname.S01E02.PROPER.dsr.nf.avi
    '%(seriesname)s.s%(seasno)d.e%(epno)d.avi', # seriesname.s01.e02.avi
    '%(seriesname)s-s%(seasno)de%(epno)d.avi', # seriesname-s01e02.avi
    '%(seriesname)s-s%(seasno)de%(epno)d.the.wrong.ep.name.avi', # seriesname-s01e02.the.wrong.ep.name.avi
    '%(seriesname)s - [%(seasno)dx%(epno)d].avi', # seriesname - [01x02].avi
    '%(seriesname)s - [%(seasno)dx0%(epno)d].avi', # seriesname - [01x002].avi
    '%(seriesname)s-[%(seasno)dx%(epno)d].avi', # seriesname-[01x02].avi
    '%(seriesname)s [%(seasno)dx%(epno)d].avi', # seriesname [01x02].avi
    '%(seriesname)s [%(seasno)dx%(epno)d] the wrong ep name.avi', # seriesname [01x02] epname.avi
    '%(seriesname)s [%(seasno)dx%(epno)d] - the wrong ep name.avi', # seriesname [01x02] - the wrong ep name.avi
    '%(seriesname)s - [%(seasno)dx%(epno)d] - the wrong ep name.avi', # seriesname - [01x02] - the wrong ep name.avi
    '%(seriesname)s.%(seasno)dx%(epno)d.The_Wrong_ep_name.avi', # seriesname.01x02.epname.avi
    '%(seriesname)s.%(seasno)d%(epno)02d.The Wrong_ep.names.avi', # seriesname.102.epname.avi
    '%(seriesname)s_s%(seasno)de%(epno)d_The_Wrong_ep_na-me.avi', # seriesname_s1e02_epname.avi
    '%(seriesname)s - s%(seasno)de%(epno)d - dsr.nf.avi', # seriesname - s01e02 - dsr.nf.avi
    '%(seriesname)s - s%(seasno)de%(epno)d - the wrong ep name.avi', # seriesname - s01e02 - the wrong ep name.avi
    '%(seriesname)s - s%(seasno)de%(epno)d - the wrong ep name.avi', # seriesname - s01e02 - the_wrong_ep_name!.avi
    ]
    # Each entry drives one generated test with a different seriesname.
    test_data = [
    {'name': 'test_name_parser_unicode',
    'description': 'Tests parsing show containing unicode characters',
    'name_data': {'seriesname': 'T\xc3\xacnh Ng\xc6\xb0\xe1\xbb\x9di Hi\xe1\xbb\x87n \xc4\x90\xe1\xba\xa1i'}},
    {'name': 'test_name_parser_basic',
    'description': 'Tests most basic filename (simple seriesname)',
    'name_data': {'seriesname': 'series name'}},
    {'name': 'test_name_parser_showdashname',
    'description': 'Tests with dash in seriesname',
    'name_data': {'seriesname': 'S-how name'}},
    {'name': 'test_name_parser_exclaim',
    'description': 'Tests parsing show with exclamation mark',
    'name_data': {'seriesname': 'Show name!'}},
    {'name': 'test_name_parser_shownumeric',
    'description': 'Tests with numeric show name',
    'name_data': {'seriesname': '123'}},
    {'name': 'test_name_parser_shownumericspaces',
    'description': 'Tests with numeric show name, with spaces',
    'name_data': {'seriesname': '123 2008'}},
    ]
    for cdata in test_data:
        # Make new wrapped function
        # NOTE(review): cur_test closes over `cdata` (late binding); this
        # relies on the nose generator runner executing each yielded test
        # before the loop advances to the next cdata -- confirm before
        # refactoring this into eager execution.
        def cur_test():
            for seas in range(1, 11):
                for ep in range(1, 11):
                    name_data = cdata['name_data']
                    name_data['seasno'] = seas
                    name_data['epno'] = ep
                    names = [x % name_data for x in name_formats]
                    for cur in names:
                        p = FileParser(cur).parse()
                        assertEquals(p.episodenumbers, [name_data['epno']])
                        assertEquals(p.seriesname, name_data['seriesname'])
                        # Only EpisodeInfo has seasonnumber
                        if not isinstance(p, (DatedEpisodeInfo, NoSeasonEpisodeInfo)):
                            assertEquals(p.seasonnumber, name_data['seasno'])
        #end cur_test
        cur_test.description = cdata['description']
        yield cur_test
def check_case(curtest):
    """Runs test case, used by test_parsing_generator
    """
    episode = FileParser(curtest['input']).parse()
    expected_series = curtest['parsedseriesname']
    if episode.seriesname is None and expected_series is None:
        pass  # allow for None seriesname
    else:
        got = episode.seriesname.lower()
        want = expected_series.lower()
        assert got == want, "%s == %s" % (got, want)
    assertEquals(episode.episodenumbers, curtest['episodenumbers'])
    # Dated / season-less episodes carry no season number to compare.
    if not isinstance(episode, (DatedEpisodeInfo, NoSeasonEpisodeInfo)):
        assertEquals(episode.seasonnumber, curtest['seasonnumber'])
def test_parsing_generator():
    """Generates test for each test case in test_files.py
    """
    for category, testcases in files.items():
        for testindex, curtest in enumerate(testcases):
            # The case itself travels in the yielded tuple, so the
            # wrapper does not need to capture it.
            def run_case(case):
                return check_case(case)
            run_case.description = 'test_parsing_%s_%d: %r' % (
                category, testindex, curtest['input'])
            yield (run_case, curtest)
if __name__ == '__main__':
    # Allow running this test module directly through nose.
    import nose
    nose.main()
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for function_scopes module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.autograph.converters import function_scopes
from tensorflow.python.autograph.core import converter_testing
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test
class FunctionBodyTransformerTest(converter_testing.TestCase):
  # Verifies that the function_scopes converter names ops created inside
  # a converted function under a scope derived from the function name,
  # and that docstrings survive the conversion.
  @test_util.run_deprecated_v1
  def test_basic(self):
    def test_fn(l):
      """Docstring."""
      a = 1
      l += a
      return l
    with self.converted(test_fn, function_scopes, {}) as result:
      result_op = result.test_fn(constant_op.constant(1))
      # Ops created inside the converted function are scoped to its name.
      self.assertIn('test_fn/', result_op.op.name)
      self.assertEqual('Docstring.', result.test_fn.__doc__)
  @test_util.run_deprecated_v1
  def test_multiline_docstring(self):
    tf = None
    def test_fn():
      """First sentence.
      Second sentence.
      """
      return tf.constant(1)
    with self.converted(test_fn, function_scopes, {},
                        (constant_op.constant,)) as result:
      result_op = result.test_fn()
      self.assertIn('test_fn/', result_op.op.name)
      # Both docstring lines must survive the conversion.
      self.assertIn('First sentence.', result.test_fn.__doc__)
      self.assertIn('Second sentence.', result.test_fn.__doc__)
  @test_util.run_deprecated_v1
  def test_nested_functions(self):
    def test_fn(l):
      def inner_fn(i):
        return i + 1
      l += 1
      return l, inner_fn(l)
    with self.converted(test_fn, function_scopes, {},
                        (ops.name_scope,)) as result:
      first, second = result.test_fn(constant_op.constant(1))
      # Outer-function ops carry only the outer scope; inner-function ops
      # are nested under it.
      self.assertIn('test_fn/', first.op.name)
      self.assertNotIn('inner_fn', first.op.name)
      self.assertIn('test_fn/inner_fn/', second.op.inputs[0].name)
  @test_util.run_deprecated_v1
  def test_method(self):
    class TestClass(object):
      def test_fn(self, l):
        def inner_fn(i):
          return i + 1
        l += 1
        return l, inner_fn(l)
    ns = {'TestClass': TestClass}
    node, ctx = self.prepare(TestClass, ns)
    node = function_scopes.transform(node, ctx)
    with self.compiled(node, {}, (ops.name_scope,)) as result:
      first, second = result.TestClass().test_fn(constant_op.constant(1))
      self.assertIn('test_fn/', first.op.name)
      self.assertNotIn('inner_fn', first.op.name)
      self.assertIn('test_fn/inner_fn/', second.op.inputs[0].name)
if __name__ == '__main__':
  # Run through TensorFlow's test runner.
  test.main()
|
<?php
/**************************************************************************/
/* PHP-NUKE: Advanced Content Management System */
/* ============================================ */
/* */
/* This is the language module with all the system messages */
/* */
/* If you made a translation go to the my website and send to me */
/* the translated file. Please keep the original text order by modules, */
/* and just one message per line, also double check your translation! */
/* */
/* You need to change the second quoted phrase, not the capital one! */
/* */
/* If you need to use double quotes (") remember to add a backslash (\), */
/* so your entry will look like: This is \"double quoted\" text. */
/* And, if you use HTML code, please double check it. */
/**************************************************************************/
// Brazilian Portuguese message catalogue for the News admin module.
// Review fixes: _WARNING closed <b> with a mismatched </a> tag, and
// _CATADDED misspelled "Categoria" as "Categiria".
define("_NEWS","News");
define("_FUNCTIONS","Funções");
define("_YES","sim");
define("_NO","Não");
define("_ALLTOPICS","Todos os Tópicos");
define("_CATEGORY","Categoria");
define("_SAVECHANGES","Salvar alterações");
define("_OK","Ok!");
define("_SAVE","Salvar");
define("_NOSUBJECT","Sem assunto");
define("_ARTICLES","Notícias");
define("_ALL","Todos(as)");
define("_AREYOUSURE","Você tem certeza de que incluiu um link? Verificou a ortografia?");
define("_SELECTTOPIC","Selecione o Tópico");
define("_OPTION","Opção");
define("_AUTHOR","Autor");
define("_NAME","Nome");
define("_DATE","Data");
define("_TITLE","Título");
define("_HOUR","Hora");
define("_EDITCATEGORY","Editar categoria");
define("_ARTICLEADMIN","Administração de Notícias");
define("_ADDARTICLE","Adicionar nova notícia");
define("_STORYTEXT","Texto da notícia");
define("_EXTENDEDTEXT","Texto extendido");
define("_ARESUREURL","(Você tem certeza de que incluiu um link? Verificou a ortografia?)");
define("_PUBLISHINHOME","Publicar na Home?");
define("_ONLYIFCATSELECTED","Funciona apenas se a categoria <i>Artigos</i> não estiver selecionada");
define("_ADD","Adicionar");
define("_PROGRAMSTORY","Você deseja programar uma data para essa notícia?");
define("_NOWIS","Agora são");
define("_DAY","Dia");
define("_UMONTH","Mês");
define("_YEAR","ano");
define("_PREVIEWSTORY","Pré-visualizar notícia");
define("_POSTSTORY","Postar notícia");
define("_REMOVESTORY","Você tem certeza de que quer apagar a notícia de ID #");
define("_ANDCOMMENTS","e todos os seus comentários?");
define("_CATEGORIESADMIN","Administração de Categorias");
define("_CATEGORYADD","Adicionar nova Categoria");
define("_CATNAME","nome da Categoria");
define("_NOARTCATEDIT","Você não pode editar a Categoria <i>Articles</i>");
define("_ASELECTCATEGORY","Selecione a Categoria");
define("_CATEGORYNAME","Nome da Categoria");
define("_DELETECATEGORY","apagar Categoria");
define("_SELECTCATDEL","Selecione a Categoria a ser apagada");
define("_CATDELETED","Categoria apagada!");
define("_WARNING","<b>Aviso</b>"); // fixed: was "<b>Aviso</a>"
define("_THECATEGORY","A Categoria");
define("_HAS","possui");
define("_STORIESINSIDE","notícias publicadas");
define("_DELCATWARNING1","Você pode apagar esta Categoria e TODAS as notícias e comentários publicados nela!");
define("_DELCATWARNING2","ou você pode MOVER todas as notícias e comentários para uma nova Categoria.");
define("_DELCATWARNING3","O que você deseja fazer?");
define("_YESDEL","Sim, Apague TUDO!!!");
define("_NOMOVE","Não! Mova minhas notícias");
define("_MOVESTORIES","Mover notícias para uma nova Categoria");
define("_ALLSTORIES","TODAS as notícias publicadas em");
define("_WILLBEMOVED","serão MOVIDAS.");
define("_SELECTNEWCAT","Por favor, selecione a nova Categoria");
define("_MOVEDONE","As alterações foram realizadas com sucesso!!!");
define("_CATEXISTS","Esta Categoria já existe!");
define("_CATSAVED","Categoria salva!");
define("_GOTOADMIN","Ir para a Seção de Administração");
define("_CATADDED","Nova Categoria adicionada!"); // fixed typo: "Categiria"
define("_AUTOSTORYEDIT","Editar notícias automáticas");
define("_NOTES","Notas");
define("_CHNGPROGRAMSTORY","Selecione uma nova data para esta notícia:");
define("_SUBMISSIONSADMIN","Administração de notícias enviadas");
define("_DELETESTORY","Apagar notícias");
define("_EDITARTICLE","Editar notícia");
define("_NOSUBMISSIONS","Nenhum novo envio");
define("_NEWSUBMISSIONS","Submissões de novas notícias");
define("_NOTAUTHORIZED1","Você não tem autorização para tocar nesta notícia!");
define("_NOTAUTHORIZED2","Você não pode editar e/ou apagar notícias que não foram postadas por você!");
define("_POLLTITLE","Título da Enquete");
define("_POLLEACHFIELD","Coloque cada opção em um campo diferente!");
define("_ACTIVATECOMMENTS","Ativar comentários para esta notícia?");
define("_LANGUAGE","Idioma");
define("_ATTACHAPOLL","anexar uma Enquete a esta notícia?");
define("_LEAVEBLANKTONOTATTACH","(Deixe em branco para postar uma notícia sem anexar uma Enquete)<br>(<b>NOTA</b>: Notícias Automáticas/Programadas não podem ter Enquetes anexadas)");
define("_USERPROFILE","Perfil do usuário");
define("_EMAILUSER","e-mail do usuário");
define("_SENDPM","Enviar Mensagem Privada");
?>
|
import numpy as np
import scipy.io
import sys
import argparse
from keras.models import Sequential
from keras.layers.core import Dense, Activation, Merge, Dropout, Reshape
from keras.layers.recurrent import LSTM
from keras.utils import np_utils, generic_utils
from keras.callbacks import ModelCheckpoint, RemoteMonitor
from sklearn.externals import joblib
from sklearn import preprocessing
from spacy.en import English
from utils import grouper, selectFrequentAnswers
from features import get_images_matrix, get_answers_matrix, get_questions_tensor_timeseries
def main():
    """Train a VQA model: an LSTM over question word vectors merged with
    VGG image features, followed by an MLP softmax over the most
    frequent answers.

    Reads preprocessed train2014 text files and precomputed VGG features,
    trains with minibatches grouped by question length, and snapshots
    weights every ``model_save_interval`` epochs.
    NOTE(review): Python 2 script (print statements, xrange, .decode on
    str) using a pre-1.0 Keras API — confirm target environment.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-num_hidden_units_mlp', type=int, default=1024)
    parser.add_argument('-num_hidden_units_lstm', type=int, default=512)
    parser.add_argument('-num_hidden_layers_mlp', type=int, default=3)
    parser.add_argument('-num_hidden_layers_lstm', type=int, default=1)
    parser.add_argument('-dropout', type=float, default=0.5)
    parser.add_argument('-activation_mlp', type=str, default='tanh')
    parser.add_argument('-num_epochs', type=int, default=100)
    parser.add_argument('-model_save_interval', type=int, default=5)
    parser.add_argument('-batch_size', type=int, default=128)
    #TODO Feature parser.add_argument('-resume_training', type=str)
    #TODO Feature parser.add_argument('-language_only', type=bool, default= False)
    args = parser.parse_args()

    word_vec_dim= 300   # spaCy word-vector dimensionality
    img_dim = 4096      # VGG fc7 feature dimensionality
    max_len = 30        # maximum question length in tokens
    nb_classes = 1000   # cap on number of distinct answers

    #get the data
    questions_train = open('../data/preprocessed/questions_train2014.txt', 'r').read().decode('utf8').splitlines()
    questions_lengths_train = open('../data/preprocessed/questions_lengths_train2014.txt', 'r').read().decode('utf8').splitlines()
    answers_train = open('../data/preprocessed/answers_train2014_modal.txt', 'r').read().decode('utf8').splitlines()
    images_train = open('../data/preprocessed/images_train2014.txt', 'r').read().decode('utf8').splitlines()
    vgg_model_path = '../features/coco/vgg_feats.mat'
    max_answers = nb_classes
    questions_train, answers_train, images_train = selectFrequentAnswers(questions_train,answers_train,images_train, max_answers)
    # Sort all four lists together by question length so each batch has
    # near-uniform timesteps (see the `timesteps` computation below).
    questions_lengths_train, questions_train, answers_train, images_train = (list(t) for t in zip(*sorted(zip(questions_lengths_train, questions_train, answers_train, images_train))))

    #encode the remaining answers
    labelencoder = preprocessing.LabelEncoder()
    labelencoder.fit(answers_train)
    nb_classes = len(list(labelencoder.classes_))
    joblib.dump(labelencoder,'../models/labelencoder.pkl')

    # Image branch: identity reshape so VGG features can be merged as-is.
    image_model = Sequential()
    image_model.add(Reshape(input_shape = (img_dim,), dims=(img_dim,)))

    # Language branch: stacked LSTMs; only the last layer collapses the
    # sequence (return_sequences=False).
    language_model = Sequential()
    if args.num_hidden_layers_lstm == 1:
        language_model.add(LSTM(output_dim = args.num_hidden_units_lstm, return_sequences=False, input_shape=(max_len, word_vec_dim)))
    else:
        language_model.add(LSTM(output_dim = args.num_hidden_units_lstm, return_sequences=True, input_shape=(max_len, word_vec_dim)))
        for i in xrange(args.num_hidden_layers_lstm-2):
            language_model.add(LSTM(output_dim = args.num_hidden_units_lstm, return_sequences=True))
        language_model.add(LSTM(output_dim = args.num_hidden_units_lstm, return_sequences=False))

    # Merge both branches, then an MLP classifier over the answers.
    model = Sequential()
    model.add(Merge([language_model, image_model], mode='concat', concat_axis=1))
    for i in xrange(args.num_hidden_layers_mlp):
        model.add(Dense(args.num_hidden_units_mlp, init='uniform'))
        model.add(Activation(args.activation_mlp))
        model.add(Dropout(args.dropout))
    model.add(Dense(nb_classes))
    model.add(Activation('softmax'))

    # Persist the architecture; weights are saved separately per epoch.
    json_string = model.to_json()
    model_file_name = '../models/lstm_1_num_hidden_units_lstm_' + str(args.num_hidden_units_lstm) + \
                        '_num_hidden_units_mlp_' + str(args.num_hidden_units_mlp) + '_num_hidden_layers_mlp_' + \
                        str(args.num_hidden_layers_mlp) + '_num_hidden_layers_lstm_' + str(args.num_hidden_layers_lstm)
    open(model_file_name + '.json', 'w').write(json_string)

    model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
    print 'Compilation done'

    features_struct = scipy.io.loadmat(vgg_model_path)
    VGGfeatures = features_struct['feats']
    print 'loaded vgg features'
    # Map COCO image id -> column index into the VGG feature matrix.
    image_ids = open('../features/coco_vgg_IDMap.txt').read().splitlines()
    img_map = {}
    for ids in image_ids:
        id_split = ids.split()
        img_map[id_split[0]] = int(id_split[1])

    nlp = English()
    print 'loaded word2vec features...'
    ## training
    print 'Training started...'
    for k in xrange(args.num_epochs):
        progbar = generic_utils.Progbar(len(questions_train))
        # grouper pads the last batch with the final element so batches
        # always have exactly batch_size items.
        for qu_batch,an_batch,im_batch in zip(grouper(questions_train, args.batch_size, fillvalue=questions_train[-1]),
                                                grouper(answers_train, args.batch_size, fillvalue=answers_train[-1]),
                                                grouper(images_train, args.batch_size, fillvalue=images_train[-1])):
            timesteps = len(nlp(qu_batch[-1])) #questions sorted in descending order of length
            X_q_batch = get_questions_tensor_timeseries(qu_batch, nlp, timesteps)
            X_i_batch = get_images_matrix(im_batch, img_map, VGGfeatures)
            Y_batch = get_answers_matrix(an_batch, labelencoder)
            loss = model.train_on_batch([X_q_batch, X_i_batch], Y_batch)
            progbar.add(args.batch_size, values=[("train loss", loss)])
        # Periodic snapshot every model_save_interval epochs.
        if k%args.model_save_interval == 0:
            model.save_weights(model_file_name + '_epoch_{:03d}.hdf5'.format(k))

    # Final snapshot after training completes.
    # NOTE(review): this reuses the epoch-numbered filename and will
    # overwrite the last periodic snapshot when k hit the interval —
    # confirm intended.
    model.save_weights(model_file_name + '_epoch_{:03d}.hdf5'.format(k))

if __name__ == "__main__":
    main()
|
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Highcharts Essentials</title>
</head>
<body>
<div id="chart_container" style="width: 600px; height: 450px;"></div>
<script src="http://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="js/highcharts.js"></script>
<script>
(function() {
  // Render a stacked area chart of iron ore production revenue by
  // country for 2010-2013 into #chart_container.
  $( '#chart_container' ).highcharts({
    chart: {
      type: 'area'
    },
    title: {
      text: 'Iron Ore Production'
    },
    xAxis: {
      // Place ticks on the category itself rather than between them.
      tickmarkPlacement: 'on',
      categories: [2010, 2011, 2012, 2013]
    },
    yAxis: {
      title: {
        text: 'Revenue in billion USD'
      }
    },
    plotOptions: {
      area: {
        // Stack each country's series on top of the previous one.
        stacking: 'normal'
      }
    },
    // One series per country; values align with the xAxis categories.
    series: [{
      name: 'China',
      data: [1070, 1330, 1310, 1320]
    }, {
      name: 'Australia',
      data: [433, 488, 521, 530]
    }, {
      name: 'Brazil',
      data: [370, 373, 398, 398]
    }, {
      name: 'India',
      data: [230, 240, 144, 150]
    }, {
      name: 'Russia',
      data: [101, 100, 105, 102]
    }]
  });
})();
</script>
</body>
</html>
|
<!-- Title for header bar (refer to www/index.html) -->
<ion-view title="Profile">
<!-- ======================
LEFT SIDE BUTTON
====================== -->
<ion-nav-buttons side="left">
<button menu-toggle="left" class="button button-icon icon ion-navicon"></button>
</ion-nav-buttons>
<!-- ======================
RIGHT SIDE BUTTON
====================== -->
<ion-nav-buttons side="right">
<button class="button button-icon icon ion-search"></button>
</ion-nav-buttons>
<!-- ======================
MAIN CONTENT SECTION
====================== -->
<ion-content class="has-header gem-ion-content">
<!-- ======================
GEMIONIC PROFILE SECTION
====================== -->
<div class="gem-profile-media">
<div class="gem-profile-details">
<img src="img/gemionic/profile-image.jpg" alt="" class="gem-circle profile-image">
<h3 class="gem-white">Samantha Aryon</h3>
<h5 class="gem-white">London, England</h5>
<!-- ionic row - .profile-stats helps with transparency and white borders -->
<div class="row profile-stats">
<div class="col stat">
<h6 class="gem-white uppercase">Following</h6>
<h6 class="gem-white">352</h6>
</div>
<div class="col stat">
<h6 class="gem-white uppercase">Following</h6>
<h6 class="gem-white">6233</h6>
</div>
<div class="col stat">
<h6 class="gem-white uppercase">Following</h6>
<h6 class="gem-white">564</h6>
</div>
</div>
<!-- End Ionic row -->
</div>
<!-- background image -->
<img src="img/gemionic/post-img.jpg" alt="" class="fluid-image profile-background-image">
</div>
<!-- ======================
END GEMIONIC PROFILE SECTION
====================== -->
<div class="text-center padding">
<button class="button button-balanced" ng-click="">Upload Image</button>
</div>
<!-- Standard Ionic List Markup -->
<div class="list">
<a class="item item-icon-left" href="#">
<i class="icon ion-email"></i>
Send Message
</a>
<a class="item item-icon-left" href="#">
<i class="icon ion-chatbubble-working"></i>
Instant Message
</a>
<a class="item item-icon-left item-button-right" href="#">
<i class="icon ion-social-facebook"></i>
Facebook Friends
<button class="button button-small button-positive uppercase">
Connect
</button>
</a>
<li class="item item-toggle item-icon-left">
<i class="icon ion-email"></i>
Syndicate Updates
<label class="toggle toggle-assertive">
<input type="checkbox">
<div class="track">
<div class="handle"></div>
</div>
</label>
</li>
</div>
<!-- End Standard Ionic List Markup -->
</ion-content>
</ion-view>
|
from __future__ import print_function
import os
from legacypipe.image import LegacySurveyImage
class MosaicImage(LegacySurveyImage):
    '''
    Class for handling images from the Mosaic3 camera processed by the
    NOAO Community Pipeline.
    '''
    def __init__(self, survey, t):
        """Initialize from a survey and a CCDs-table row, then patch up
        filenames whose version suffix (v1/v2) does not match what is
        actually on disk."""
        super(MosaicImage, self).__init__(survey, t)
        # Check the image, data-quality, and weight filenames in turn.
        for attr in ['imgfn', 'dqfn', 'wtfn']:
            fn = getattr(self, attr)
            if os.path.exists(fn):
                continue
            # Workaround: exposure numbers 330667 through 330890 at
            # least have some of the files named "v1" and some named
            # "v2".  Try both.
            if 'v1' in fn:
                fnother = fn.replace('v1', 'v2')
                if os.path.exists(fnother):
                    print('Using', fnother, 'rather than', fn)
                    setattr(self, attr, fnother)
                    fn = fnother
            elif 'v2' in fn:
                fnother = fn.replace('v2', 'v1')
                if os.path.exists(fnother):
                    print('Using', fnother, 'rather than', fn)
                    setattr(self, attr, fnother)
                    fn = fnother

    def apply_amp_correction(self, img, invvar, x0, y0):
        # Mosaic3 uses the shared northern-camera amp correction.
        self.apply_amp_correction_northern(img, invvar, x0, y0)

    def get_fwhm(self, primhdr, imghdr):
        """Return the PSF FWHM, falling back to the SEEINGP1 primary-header
        card when the CCDs-table value is NaN."""
        # exposure 88865 has SEEINGP1 in the primary header, nothing anywhere else,
        # so FWHM in the CCDs file is NaN.
        import numpy as np
        # NOTE(review): debug print left in; consider removing or demoting
        # to a logger call.
        print('mosaic get_fwhm: self.fwhm =', self.fwhm)
        if not np.isfinite(self.fwhm):
            self.fwhm = primhdr.get('SEEINGP1', 0.0)
        return self.fwhm

    def remap_invvar(self, invvar, primhdr, img, dq):
        # Recompute inverse-variance including source shot noise.
        return self.remap_invvar_shotnoise(invvar, primhdr, img, dq)

    def remap_dq(self, dq, header):
        '''
        Called by get_tractor_image() to map the results from read_dq
        into a bitmask.
        '''
        from legacypipe.image import remap_dq_cp_codes
        # CP code 7 is ignored here; code 8 may also warrant ignoring.
        dq = remap_dq_cp_codes(dq, ignore_codes=[7]) # 8 also?
        return dq
|
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
/**
 * Determines if haystack starts with needle (element-wise prefix check).
 */
function startsWith(haystack, needle) {
    if (haystack.length < needle.length) {
        return false;
    }
    // Walk the prefix; stop at the first mismatch.
    let idx = 0;
    while (idx < needle.length && haystack[idx] === needle[idx]) {
        idx++;
    }
    return idx === needle.length;
}
exports.startsWith = startsWith;
/**
 * Determines if haystack ends with needle.
 */
function endsWith(haystack, needle) {
    const offset = haystack.length - needle.length;
    if (offset < 0) {
        // needle is longer than haystack — cannot be a suffix.
        return false;
    }
    if (offset === 0) {
        // Same length: suffix check degenerates to equality.
        return haystack === needle;
    }
    // needle can only be a suffix if its last occurrence starts at offset.
    return haystack.lastIndexOf(needle) === offset;
}
exports.endsWith = endsWith;
/**
 * Converts a simple glob-like pattern to a regular-expression pattern:
 * regex metacharacters are escaped and '*' becomes '.*'.
 */
function convertSimple2RegExpPattern(pattern) {
    const escaped = pattern.replace(/[\-\\\{\}\+\?\|\^\$\.\,\[\]\(\)\#\s]/g, '\\$&');
    return escaped.replace(/[\*]/g, '.*');
}
exports.convertSimple2RegExpPattern = convertSimple2RegExpPattern;
//# sourceMappingURL=strings.js.map
|
$(function() {
//Tool Tip
tooltipContent = function($el, $tip) {
var match = $el.attr("id").match(/block_(\w+_)?reservation_(\d+)/);
var prefix = match[1] || "";
var id = match[2];
return $("#tooltip_" + prefix + "reservation_" + id).html();
}
$('.tip').tooltipsy({
content: tooltipContent,
hide: function (e, $el) {
$el.delay(500),
$el.fadeOut(10)
}
});
// Date select calendar
$(".datepicker").datepicker({
showOn: "button",
buttonText: "<i class='fa fa-calendar icon-large'>",
}).change(function() {
var form = $(this).parents('form');
var formUrl = form.attr('action');
form.attr('action', formUrl + '#' + lastHiddenInstrumentId());
form.submit();
});
//Get the Current Hour, create a class and add it the time div
time = function() {
$e = $('.current_time');
var currentTime = new Date();
// minutes since midnight
var minutes = currentTime.getHours() * 60 + currentTime.getMinutes();
// Cache the pixel to minute ratio based on where it's initially displayed
if (!window.PIXEL_TO_MINUTE_RATIO) {
var pixels = parseInt($e.css('left'));
window.PIXEL_TO_MINUTE_RATIO = (pixels / minutes).toFixed(2);
}
var pixels = Math.floor(minutes * PIXEL_TO_MINUTE_RATIO) + 'px'
$e.css('left', pixels);
};
time();
setInterval(time, 30000);
showOrHideCanceled = function() {
if ($('#show_canceled').is(':checked')) {
$('.status_canceled').fadeIn('fast');
} else {
$('.status_canceled').fadeOut('fast');
}
}
$('#show_canceled').change(showOrHideCanceled);
// no animation when first loading
$('.status_canceled').toggle($('#show_canceled').is(':checked'));
relayCheckboxes = $('.relay_checkbox :checkbox')
if (relayCheckboxes.length > 0) {
relayCheckboxes.bind('click', function(e) {
if (confirm("Are you sure you want to toggle the relay?")) {
$(this).parent().addClass("loading");
$.ajax({
url: $(this).data("relay-url"),
success: function(data) {
updateRelayStatus(data.instrument_status);
},
data: {
switch: $(this).is(":checked") ? "on" : "off"
},
dataType: 'json'
});
} else {
return false;
}
})
.toggleSwitch();
}
function loadRelayStatuses() {
$.ajax({
url: '../instrument_statuses',
success: function(data) {
for(var i = 0; i < data.length; i++) {
updateRelayStatus(data[i].instrument_status);
}
// Refresh 2 minutes after updating
setTimeout(loadRelayStatuses, 120000);
},
dataType: 'json'
});
}
function updateRelayStatus(stat) {
$checkbox = $("#relay_" + stat.instrument_id);
// remove pre-existing errors
$checkbox.parent().find("span.error").remove();
if (stat.error_message) {
$checkbox.prop("disabled", true);
// add a new error if there is one
$checkbox.parent().append($("<span class=\"error\" title=\"" + stat.error_message + "\"></span>"));
} else {
$checkbox.prop("disabled", false).prop("checked", stat.is_on);
}
$checkbox.parent().removeClass("loading");
$checkbox.trigger("change");
}
$('.relay_checkbox').addClass('loading');
// Only try to load relay statuses if there are relays to check
if ($('.relay_checkbox :checkbox').length > 0) loadRelayStatuses();
function lastHiddenInstrumentId() {
var hiddenInstruments = $('.timeline_instrument').filter(function() {
return $(window).scrollTop() + $('.timeline_header').height() > $(this).offset().top;
});
return hiddenInstruments.last().attr('id');
}
$('#reservation_left, #reservation_right').on('click', function(event) {
var urlWithoutFragment = this.href.split('#')[0]
this.href = urlWithoutFragment + '#' + lastHiddenInstrumentId()
});
});
|
// Cyphesis Online RPG Server and AI Engine
// Copyright (C) 2009 Alistair Riddoch
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software Foundation,
// Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
// $Id$
#ifdef NDEBUG
#undef NDEBUG
#endif
#ifndef DEBUG
#define DEBUG
#endif
#include <Python.h>
#include "python_testers.h"
#include "client/Python_ClientAPI.h"
#include "rulesets/Python_API.h"
#include <cassert>
static bool stub_make_fail = false;
static bool stub_look_fail = false;
static bool stub_lookfor_fail = false;
int main()
{
    // Exercise the Python bindings for CreatorClient; stub_* flags below
    // force the stubbed C++ implementations to fail so the error paths
    // raise the expected Python exceptions.
    init_python_api("602fe3c3-e6c4-4c9a-b0ac-9f0a034042ba");
    extend_client_python_api();

    run_python_string("import server");
    run_python_string("import atlas");
    // Constructor requires a string id; wrong types must raise.
    expect_python_error("server.CreatorClient(1)", PyExc_TypeError);
    expect_python_error("server.CreatorClient(\"one\")", PyExc_ValueError);
    run_python_string("c=server.CreatorClient(\"1\")");
    run_python_string("c.as_entity()");
    // make() requires an atlas.Entity argument.
    expect_python_error("c.make()", PyExc_TypeError);
    expect_python_error("c.make('1')", PyExc_TypeError);
    run_python_string("c.make(atlas.Entity('1'))");
    stub_make_fail = true;
    expect_python_error("c.make(atlas.Entity('1'))", PyExc_RuntimeError);
    stub_make_fail = false;
    run_python_string("c.set('1', atlas.Entity('1'))");
    expect_python_error("c.set('1', 'not an entity')", PyExc_TypeError);
    expect_python_error("c.set(1, atlas.Entity('1'))", PyExc_TypeError);
    run_python_string("c.look('1')");
    stub_look_fail = true;
    expect_python_error("c.look('1')", PyExc_RuntimeError);
    stub_look_fail = false;
    expect_python_error("c.look(1)", PyExc_TypeError);
    run_python_string("e=c.look('1')");
    run_python_string("assert type(e) == server.LocatedEntity");
    run_python_string("c.look_for(atlas.Entity('1'))");
    // A failed look_for returns None rather than raising.
    stub_lookfor_fail = true;
    run_python_string("c.look_for(atlas.Entity('1'))");
    stub_lookfor_fail = false;
    expect_python_error("c.look_for('1')", PyExc_TypeError);
    run_python_string("c.send(atlas.Operation('info'))");
    expect_python_error("c.send('info')", PyExc_TypeError);
    expect_python_error("c.send()", PyExc_TypeError);
    run_python_string("c.delete('1')");
    expect_python_error("c.delete(1)", PyExc_TypeError);
    expect_python_error("c.delete()", PyExc_TypeError);

    // Comparison and read-only attribute access.
    run_python_string("c == server.CreatorClient(\"2\")");
    run_python_string("assert type(c.map) == server.Map");
    run_python_string("assert type(c.location) == atlas.Location");
    run_python_string("assert type(c.time) == server.WorldTime");
    expect_python_error("c.foo", PyExc_AttributeError);
    expect_python_error("c.foo_operation", PyExc_AttributeError);
    // Arbitrary scalar attributes may be set; lists and built-ins may not.
    run_python_string("c.foo = 1");
    run_python_string("assert c.foo == 1");
    expect_python_error("c.foo = [1,2]", PyExc_ValueError);
    expect_python_error("c.map = 1", PyExc_AttributeError);

    shutdown_python_api();
    return 0;
}
// stubs
#include "client/ObserverClient.h"
#include "client/CreatorClient.h"
#include "rulesets/Entity.h"
#include "common/id.h"
#include <Atlas/Objects/Operation.h>
#include <Atlas/Objects/RootEntity.h>
using Atlas::Objects::Entity::RootEntity;
// Stub: return a fresh Entity for the id, or null when the test has
// armed stub_look_fail to simulate a failed look.
LocatedEntity * CharacterClient::look(const std::string & id)
{
    if (stub_look_fail) {
        return 0;
    }
    return new Entity(id, integerId(id));
}

// Stub: like look(), but keyed off stub_lookfor_fail.
LocatedEntity * CharacterClient::lookFor(const RootEntity & entity)
{
    if (stub_lookfor_fail) {
        return 0;
    }
    return new Entity(entity->getId(), integerId(entity->getId()));
}

// Stub: return a new Entity mirroring the request, or null when
// stub_make_fail simulates a failed creation.
LocatedEntity * CreatorClient::make(const RootEntity & entity)
{
    if (stub_make_fail) {
        return 0;
    }
    return new Entity(entity->getId(), integerId(entity->getId()));
}
// Minimal no-op stubs for the client classes: just enough to link the
// test above; none of these perform any real work.
CreatorClient::CreatorClient(const std::string & id, long intId,
                             ClientConnection &c) :
                             CharacterClient(id, intId, c)
{
}

void CreatorClient::sendSet(const std::string & id,
                            const RootEntity & entity)
{
}

void CreatorClient::del(const std::string & id)
{
}

CharacterClient::CharacterClient(const std::string & id, long intId,
                                 ClientConnection & c) :
                                 BaseMind(id, intId), m_connection(c)
{
}

void CharacterClient::send(const Operation & op)
{
}

ObserverClient::ObserverClient()
{
}

ObserverClient::~ObserverClient()
{
}

int ObserverClient::setup(const std::string & account,
                          const std::string & password,
                          const std::string & avatar)
{
    return 0;
}

int ObserverClient::teardown()
{
    return 0;
}

void ObserverClient::idle()
{
}
// No-op stubs for BaseClient and ClientConnection; account/operation
// calls return empty Info objects or success codes.
BaseClient::BaseClient() : m_character(0)
{
}

BaseClient::~BaseClient()
{
}

Atlas::Objects::Root BaseClient::createSystemAccount()
{
    return Atlas::Objects::Operation::Info();
}

Atlas::Objects::Root BaseClient::createAccount(const std::string & name,
                                               const std::string & password)
{
    return Atlas::Objects::Operation::Info();
}

void BaseClient::send(const Operation & op)
{
}

CreatorClient * BaseClient::createCharacter(const std::string & type)
{
    return 0;
}

ClientConnection::ClientConnection()
{
}

ClientConnection::~ClientConnection()
{
}

int ClientConnection::wait()
{
    return 0;
}

int ClientConnection::sendAndWaitReply(const Operation & op, OpVector & res)
{
    return 0;
}

void ClientConnection::operation(const Operation & op)
{
}
|
#ifndef sequence_h
#define sequence_h

#include <vector>

// NOTE(review): adding declarations to namespace std is technically
// undefined behavior per the C++ standard; kept because existing callers
// refer to std::sequence. Consider moving to a project namespace.
namespace std {

/// A std::vector pre-filled with the arithmetic progression
/// a, a+d, a+2d, ... The element count is round((b - a) / d),
/// computed in double precision.
template <class T>
class sequence
    : public std::vector<T>
{
public:
    /// @param a first value of the progression
    /// @param b upper bound used only to compute the element count
    /// @param d step between consecutive values (default 1)
    ///          (was named `w` here, inconsistent with the definition)
    sequence(T a, T b, T d = (T)1);
};

template <class T>
sequence<T>::sequence(T a, T b, T d)
    // round((b - a) / d) elements; assumes b >= a and d > 0 — a negative
    // count would wrap around in the unsigned cast.
    : std::vector<T>((unsigned)((double)(b - a)/(double)d + 0.5))
{
    for(unsigned i = 0; i < this->size(); ++i)
        (*this)[i] = i * d + a;
}

} // namespace std

#endif //sequence_h
|
using FrannHammer.DataAccess.Contracts;
using FrannHammer.Domain.Contracts;
using FrannHammer.NetCore.WebApi.DataAccess;
using Microsoft.Extensions.DependencyInjection;
namespace FrannHammer.NetCore.WebApi.ServiceCollectionExtensions
{
    /// <summary>
    /// Dependency-injection helpers that wire up the in-memory repository
    /// implementation for every domain entity the web API exposes.
    /// </summary>
    public static class RepositoryServiceCollectionExtensions
    {
        /// <summary>
        /// Registers a transient <c>InMemoryRepository&lt;T&gt;</c> for each
        /// repository contract and returns the collection for chaining.
        /// </summary>
        public static IServiceCollection AddRepositorySupport(this IServiceCollection services)
        {
            return services
                .AddTransient<IRepository<ICharacter>, InMemoryRepository<ICharacter>>()
                .AddTransient<IRepository<IMove>, InMemoryRepository<IMove>>()
                .AddTransient<IRepository<IMovement>, InMemoryRepository<IMovement>>()
                .AddTransient<IRepository<ICharacterAttributeRow>, InMemoryRepository<ICharacterAttributeRow>>()
                .AddTransient<IRepository<IUniqueData>, InMemoryRepository<IUniqueData>>();
        }
    }
}
|
package com.xxx.dao;
/**
 * Data-access interface for the demo table (no operations defined yet).
 * <p/>
 * User: xingle
 * Date: 15/12/29
 * Time: 10:16
 */
public interface DemoDao {
}
|
import * as webpack from 'webpack';
import * as merge from 'webpack-merge';
import common from './webpack.common';
// Production webpack configuration: extends the shared base config with
// minification and a hard-coded production NODE_ENV define.
const definePlugin = new webpack.DefinePlugin({
  'process.env.NODE_ENV': JSON.stringify('production')
});

const config: webpack.Configuration = merge(common, {
  optimization: {
    minimize: true
  },
  plugins: [definePlugin]
});

export default config;
|
#!/usr/bin/env python
#-*-coding:utf-8-*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
###############################################################################
# DOCS
###############################################################################
"""Setup for Ninja-ide (http://www.ninja-ide.org)
NINJA-IDE is a cross-platform integrated development environment (IDE).
NINJA-IDE runs on Linux/X11, Mac OS X and Windows desktop operating systems,
and allows developers to create applications for several purposes using all the
tools and utilities of NINJA-IDE, making the task of writing software easier
and more enjoyable.
"""
###############################################################################
# IMPORTS
###############################################################################
import sys
from setuptools import setup, find_packages
import ninja_ide
###############################################################################
# VALIDATE THE NEEDED MODULES
###############################################################################
# This modules can't be easy installed
# Syntax: [(module, url of the tutorial)...]
# Probe for modules that cannot be pip-installed automatically; on failure
# print a pointer to the install instructions and abort.
if sys.platform == 'win32':
    NEEDED_MODULES = [("PyQt4",
        "http://www.riverbankcomputing.co.uk/software/pyqt/intro"),
        ('win32con', "http://sourceforge.net/projects/pywin32/files/pywin32/")]
else:
    NEEDED_MODULES = [("PyQt4",
        "http://www.riverbankcomputing.co.uk/software/pyqt/intro"), ]

for mn, urlm in NEEDED_MODULES:
    try:
        __import__(mn)
    except ImportError:
        print("Module '%s' not found. For more details: '%s'.\n" % (mn, urlm))
        sys.exit(1)

# Platform-specific filesystem-watching backends.
# NOTE(review): 'linux2' is the Python 2 value of sys.platform; on
# Python 3 it is 'linux', so this branch would be skipped — confirm the
# intended interpreter.
dependencies = []
if sys.platform == 'darwin':
    dependencies.append("macfsevents")
elif sys.platform == 'linux2':
    dependencies.append("pyinotify")
###############################################################################
# PRE-SETUP
###############################################################################
# Common
# Keyword arguments for setup(), assembled from ninja_ide metadata and
# the platform-specific dependency list built above.
params = {
    "name": ninja_ide.__prj__,
    "version": ninja_ide.__version__,
    "description": ninja_ide.__doc__,
    "author": ninja_ide.__author__,
    "author_email": ninja_ide.__mail__,
    "url": ninja_ide.__url__,
    "license": ninja_ide.__license__,
    "keywords": "ide python ninja development",
    # Fixed: the trove prefix "Development Status ::" was duplicated,
    # producing an invalid classifier.
    "classifiers": ["Development Status :: 4 - Beta",
        "Topic :: Utilities",
        "License :: OSI Approved :: GNU General Public License (GPL)",
        "Natural Language :: English",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2"],

    # Ninja need:
    "install_requires": dependencies,

    # include all resources
    "include_package_data": True,
    "package_data": {'': ['*.png', '*.gif', '*.jpg', '*.json', '*.qss',
                          '*.js', '*.html', '*.css', '*.qm', '*.qml']},

    # include ninja pkg and setup the run script
    "packages": find_packages() + [
        'ninja_ide/addins',
        'ninja_ide/addins/lang',
        'ninja_ide/addins/qml',
        'ninja_ide/addins/qml/img',
        'ninja_ide/addins/syntax',
        'ninja_ide/addins/theme',
        'ninja_ide/img'],

    #auto create scripts
    "entry_points": {
        'console_scripts': [
            'ninja-ide = ninja_ide:setup_and_run',
        ],
        'gui_scripts': [
            'ninja-ide = ninja_ide:setup_and_run',
        ]
    }
}
###############################################################################
# SETUP
###############################################################################
# Run the build/installation with the parameters assembled above.
setup(**params)


###############################################################################
# MAIN
###############################################################################

if __name__ == '__main__':
    # Invoked directly (not via pip/setuptools): just show the module doc.
    print(__doc__)
|
'''
Functions related to creating repodata index files.
'''
from __future__ import absolute_import, division, print_function
import os
import bz2
import sys
import json
import tarfile
from os.path import isfile, join, getmtime
from conda_build.utils import file_info
from conda.compat import PY3
from conda.utils import md5_file
def read_index_tar(tar_path):
    """ Returns the index.json dict inside the given package tarball.

    Raises RuntimeError (wrapping the underlying tarfile/OS error) when
    the tarball cannot be opened or info/index.json cannot be extracted,
    which usually indicates a corrupt package file.
    """
    try:
        with tarfile.open(tar_path) as t:
            try:
                return json.loads(t.extractfile('info/index.json').read().decode('utf-8'))
            except EOFError:
                # Truncated member inside an otherwise-readable archive.
                raise RuntimeError("Could not extract %s. File probably corrupt."
                                   % tar_path)
    except OSError as e:
        raise RuntimeError("Could not extract %s (%s)" % (tar_path, e))
    except tarfile.ReadError:
        # Not a valid (bzip2-compressed) tar file at all.
        raise RuntimeError("Could not extract metadata from %s. File probably corrupt." % tar_path)
def write_repodata(repodata, dir_path):
    """ Write updated repodata.json and repodata.json.bz2 """
    dumped = json.dumps(repodata, indent=2, sort_keys=True)
    # Normalize: no trailing whitespace on any line, exactly one final newline.
    stripped_lines = [line.rstrip() for line in dumped.splitlines()]
    text = '\n'.join(stripped_lines)
    if not text.endswith('\n'):
        text += '\n'
    # Plain-text copy ...
    with open(join(dir_path, 'repodata.json'), 'w') as out:
        out.write(text)
    # ... and the bz2-compressed copy of the same bytes.
    with open(join(dir_path, 'repodata.json.bz2'), 'wb') as out:
        out.write(bz2.compress(text.encode('utf-8')))
def update_index(dir_path, verbose=False, force=False, check_md5=False, remove=True):
    """
    Update all index files in dir_path with changed packages.

    :param verbose: Should detailed status messages be output?
    :type verbose: bool
    :param force: Whether to re-index all packages (including those that
                  haven't changed) or not.
    :type force: bool
    :param check_md5: Whether to check MD5s instead of mtimes for determining
                      if a package changed.
    :type check_md5: bool
    :param remove: Whether to drop index entries for packages no longer on
                   disk.
    :type remove: bool
    """
    if verbose:
        print("updating index in:", dir_path)
    index_path = join(dir_path, '.index.json')
    # Load the cached index unless a full re-index was requested.
    if force:
        index = {}
    else:
        try:
            mode_dict = {'mode': 'r', 'encoding': 'utf-8'} if PY3 else {'mode': 'rb'}
            with open(index_path, **mode_dict) as fi:
                index = json.load(fi)
        except (IOError, ValueError):
            index = {}

    files = set(fn for fn in os.listdir(dir_path) if fn.endswith('.tar.bz2'))
    if any(fn.startswith('_license-') for fn in files):
        # Fixed typo in the user-facing message: "which" -> "wish".
        sys.exit("""\
Error:
    Indexing a copy of the Anaconda conda package channel is neither
    necessary nor supported.  If you wish to add your own packages,
    you can do so by adding them to a separate channel.
""")
    # Re-read metadata for any package that is new or has changed
    # (by md5 or mtime, depending on check_md5).
    for fn in files:
        path = join(dir_path, fn)
        if fn in index:
            if check_md5:
                if index[fn]['md5'] == md5_file(path):
                    continue
            elif index[fn]['mtime'] == getmtime(path):
                continue
        if verbose:
            print('updating:', fn)
        d = read_index_tar(path)
        d.update(file_info(path))
        index[fn] = d

    # Record whether a detached signature file exists for each package.
    for fn in files:
        index[fn]['sig'] = '.' if isfile(join(dir_path, fn + '.sig')) else None

    if remove:
        # remove files from the index which are not on disk
        for fn in set(index) - files:
            if verbose:
                print("removing:", fn)
            del index[fn]

    # Deal with Python 2 and 3's different json module type reqs
    mode_dict = {'mode': 'w', 'encoding': 'utf-8'} if PY3 else {'mode': 'wb'}
    with open(index_path, **mode_dict) as fo:
        json.dump(index, fo, indent=2, sort_keys=True, default=str)

    # --- new repodata
    # Strip fields that do not belong in the public repodata.
    for fn in index:
        info = index[fn]
        for varname in 'arch', 'platform', 'mtime', 'ucs':
            try:
                del info[varname]
            except KeyError:
                pass
        # Normalize legacy 'requires' metadata to 'depends'.
        if 'requires' in info and 'depends' not in info:
            info['depends'] = info['requires']

    repodata = {'packages': index, 'info': {}}
    write_repodata(repodata, dir_path)
|
/**
* Created by Fredrik on 9/25/14.
*/
(function () {
  // Registers the Swedish ('sv') locale with ui-grid's i18nService via a
  // decorator, so Swedish strings are available grid-wide.
  angular.module('ui.grid').config(['$provide', function($provide) {
    $provide.decorator('i18nService', ['$delegate', function($delegate) {
      $delegate.add('sv', {
        aggregate: {
          label: 'Artiklar'
        },
        groupPanel: {
          description: 'Dra en kolumnrubrik hit och släpp den för att gruppera efter den kolumnen.'
        },
        search: {
          placeholder: 'Sök...',
          showingItems: 'Visar artiklar:',
          selectedItems: 'Valda artiklar:',
          totalItems: 'Antal artiklar:',
          size: 'Sidstorlek:',
          first: 'Första sidan',
          next: 'Nästa sida',
          previous: 'Föregående sida',
          last: 'Sista sidan'
        },
        menu: {
          text: 'Välj kolumner:'
        },
        sort: {
          ascending: 'Sortera stigande',
          descending: 'Sortera fallande',
          remove: 'Inaktivera sortering'
        },
        column: {
          hide: 'Göm kolumn'
        },
        // Footer aggregation labels.
        aggregation: {
          count: 'Antal rader: ',
          sum: 'Summa: ',
          avg: 'Genomsnitt: ',
          min: 'Min: ',
          max: 'Max: '
        },
        pinning: {
          pinLeft: 'Fäst vänster',
          pinRight: 'Fäst höger',
          unpin: 'Lösgör'
        },
        // Grid menu: CSV/PDF export and import entries.
        gridMenu: {
          columns: 'Kolumner:',
          importerTitle: 'Importera fil',
          exporterAllAsCsv: 'Exportera all data som CSV',
          exporterVisibleAsCsv: 'Exportera synlig data som CSV',
          exporterSelectedAsCsv: 'Exportera markerad data som CSV',
          exporterAllAsPdf: 'Exportera all data som PDF',
          exporterVisibleAsPdf: 'Exportera synlig data som PDF',
          exporterSelectedAsPdf: 'Exportera markerad data som PDF'
        },
        importer: {
          noHeaders: 'Kolumnnamn kunde inte härledas. Har filen ett sidhuvud?',
          noObjects: 'Objekt kunde inte härledas. Har filen data undantaget sidhuvud?',
          invalidCsv: 'Filen kunde inte behandlas, är den en giltig CSV?',
          invalidJson: 'Filen kunde inte behandlas, är den en giltig JSON?',
          jsonNotArray: 'Importerad JSON-fil måste innehålla ett fält. Import avbruten.'
        },
        pagination: {
          sizes: 'Artiklar per sida',
          totalItems: 'Artiklar'
        }
      });
      return $delegate;
    }]);
  }]);
})();
|
/*
* Copyright © 2015 TELECOM Nancy
*
* This file is part of gps ROS module.
* gps ROS module is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* gps ROS module is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with gps ROS module. If not, see <http://www.gnu.org/licenses/>.
*/
#include <ros/ros.h>
#include "save.h"
/** Main node entry point. */
/** Main node entry point.
 *
 *  Initializes ROS as "gps_save_node", constructs the GpsSave worker with
 *  the public and private ("~") node handles, and spins until shutdown.
 */
int main(int argc, char **argv)
{
  ros::init(argc, argv, "gps_save_node");
  ros::NodeHandle node;
  // Private handle gives access to this node's ~-scoped parameters.
  ros::NodeHandle private_nh("~");
  gps_save::GpsSave save(node, private_nh);

  // handle callbacks until shut down
  ros::spin();

  return 0;
}
|
#include "speaker.h"
espeak_POSITION_TYPE position_type;
char *path=NULL;
int Buflength = 500, Options=0;
void* user_data;
t_espeak_callback *SynthCallback;
espeak_PARAMETER Parm;
char Voice[] = {"default"};
//char text[20] = {"Hello World!"};
unsigned int Size,position=0, end_position=0, flags=espeakCHARS_AUTO, *unique_identifier;
// Initialize eSpeak for direct audio playback with the global buffer
// length/options, and select the default voice. Call once before speak().
void initSpeaker()
{
    espeak_Initialize(AUDIO_OUTPUT_PLAYBACK, Buflength, path, Options );
    espeak_SetVoiceByName(Voice);
}
// Speak the text and block until playback finishes.
// Returns speak()'s result: 0 on success, -1 if speech was already playing.
int speak_synchronously(string text)
{
    int retval = speak(text);
    espeak_Synchronize( );
    return retval;
}
// Queue the given text for asynchronous speech synthesis.
// Returns 0 on success, -1 when a previous utterance is still playing.
// Side effect: updates the global `Size` with the buffer length passed
// to eSpeak (text length plus the terminating NUL).
int speak(string text)
{
    if (espeak_IsPlaying() == 1)
    {
        return -1;
    }
    // eSpeak wants the size of the text buffer including the NUL.
    Size = text.length() + 1;
    // Removed unused locals (I, Run, L) and the ctext indirection.
    espeak_Synth( &text[0], Size, position, position_type, end_position, flags,
              unique_identifier, user_data );
    //espeak_Synchronize( );
    return 0;
}
|
package com.bwssystems.HABridge.api.hue;
import java.util.ArrayList;
public class GroupClassTypes {
	public final static String BATHROOM = "Bathroom";
	public final static String BEDROOM = "Bedroom";
	public final static String CARPORT = "Carport";
	public final static String DINING = "Dining";
	public final static String DRIVEWAY = "Driveway";
	public final static String FRONT_DOOR = "Front door";
	public final static String GARAGE = "Garage";
	public final static String GARDEN = "Garden";
	public final static String GYM = "Gym";
	public final static String HALLWAY = "Hallway";
	public final static String BEDROOM_KIDS = "Kids bedroom";
	public final static String KITCHEN = "Kitchen";
	public final static String LIVING_ROOM = "Living room";
	public final static String NURSERY = "Nursery";
	public final static String OFFICE = "Office";
	public final static String OTHER = "Other";
	public final static String RECREATION = "Recreation";
	public final static String TERRACE = "Terrace";
	public final static String TOILET = "Toilet";

	// All known Hue group class names, in declaration order.
	ArrayList<String> groupClassTypes;

	/** Populates the list with every supported group class name. */
	public GroupClassTypes() {
		groupClassTypes = new ArrayList<String>();
		for (String name : new String[] {
				BATHROOM, BEDROOM, CARPORT, DINING, DRIVEWAY, FRONT_DOOR,
				GARAGE, GARDEN, GYM, HALLWAY, BEDROOM_KIDS, KITCHEN,
				LIVING_ROOM, NURSERY, OFFICE, OTHER, RECREATION, TERRACE,
				TOILET }) {
			groupClassTypes.add(name);
		}
	}

	/**
	 * Returns true when the trimmed input exactly matches one of the known
	 * group class names (case-sensitive); false for null, blank, or
	 * unknown values.
	 */
	public Boolean validateType(String type) {
		if (type == null) {
			return false;
		}
		String candidate = type.trim();
		if (candidate.isEmpty()) {
			return false;
		}
		for (String classType : groupClassTypes) {
			if (candidate.contentEquals(classType)) {
				return true;
			}
		}
		return false;
	}
}
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: shrink ``Task.content`` from 200 to 125 chars.

    Historical migration — the frozen ORM snapshot below must stay exactly
    as generated; do not edit it by hand.
    """

    def forwards(self, orm):
        # Changing field 'Task.content'
        db.alter_column('Listigain_task', 'content', self.gf('django.db.models.fields.CharField')(max_length=125))

    def backwards(self, orm):
        # Changing field 'Task.content'
        db.alter_column('Listigain_task', 'content', self.gf('django.db.models.fields.CharField')(max_length=200))

    # Frozen model definitions captured when this migration was generated.
    models = {
        'Listigain.task': {
            'Meta': {'object_name': 'Task'},
            'category': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
            'completed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'content': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
            'difficulty': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'priority': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
            'skip': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'time': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 4, 28, 21, 19, 41, 183000)'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 4, 28, 21, 19, 41, 182000)'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }

    complete_apps = ['Listigain']
|
# encoding: utf-8
import csv
from sqlalchemy import Table, Column, Integer, String, Float, Boolean
from sqlalchemy import ForeignKey, Sequence
from sqlalchemy.orm import relationship
import osgeo.ogr as ogr
import osgeo.osr as osr
from geoalchemy import GeometryColumn, Polygon, GeometryDDL, WKTSpatialElement
from nextgisbio.models import DBSession, Base
from nextgisbio.models import Key_area
from nextgisbio.models import NoResultFound
from nextgisbio.utils.jsonify import JsonifyMixin
# Many-to-many association table linking key areas and square polygons.
square_keyarea_association = Table('square_karea_association', Base.metadata,
    Column('square_id', Integer, ForeignKey('square.id')),
    Column('key_area_id', Integer, ForeignKey('key_area.id'))
)
class Squares(Base, JsonifyMixin):
    """Square polygon (SRID 3857) linked many-to-many with key areas."""
    __tablename__ = 'square'

    id = Column(Integer, Sequence('square_id_seq', start=1), primary_key=True)
    key_areas = relationship('Key_area', secondary=square_keyarea_association, backref='squares')
    geom = GeometryColumn(Polygon(dimension=2, srid=3857))

    @staticmethod
    def add_from_file(associations_filename, shp_filename):
        '''
        Import squares from the shapefile ``shp_filename``; the first field of
        the attribute table is the identifier. At the same time the association
        table is filled from the delimited file ``associations_filename``
        (csv, tab-separated) with the columns:
        square_id     key_area_id
        '''
        import transaction
        with transaction.manager:
            dbsession = DBSession()
            ogrData = ogr.Open(shp_filename)
            layer = ogrData.GetLayer(0)
            sq = layer.GetNextFeature()
            while sq is not None:
                id = sq.GetFieldAsString(0)
                geom = sq.GetGeometryRef()
                geom = geom.ExportToWkt()
                square = Squares(id=id, geom=WKTSpatialElement(geom, srid=3857))
                dbsession.add(square)
                sq = layer.GetNextFeature()
            dbsession.flush()

            reader = csv.reader(open(associations_filename), delimiter='\t')
            # Skip the header row. next(reader) works on both Python 2 and
            # Python 3, unlike the py2-only reader.next() method.
            next(reader)
            records = [line for line in reader]
            for id, key_area_id in records:
                # Look up the key area by its id
                key_a = dbsession.query(Key_area).filter_by(id=key_area_id).one()
                # Look up the polygon by its id
                square = dbsession.query(Squares).filter_by(id=id).one()
                square.key_areas.append(key_a)

    @staticmethod
    def export_to_file(filename):
        """Dump one (square_id, key_area_id) row per linked key area."""
        from nextgisbio.utils.dump_to_file import dump
        fieldnames = ['square_id', 'key_area_id']
        squares_from_db = DBSession().query(Squares).join(Squares.key_areas).order_by(Squares.id).all()
        squares = []
        for square in squares_from_db:
            for key_area in square.key_areas:
                squares.append([square.id, key_area.id])
        dump(filename, fieldnames, squares, is_array=True)
GeometryDDL(Squares.__table__)
|
#!/usr/bin/env python
import os
import json
import zmq
import common.realtime as realtime
from common.services import service_list
from selfdrive.swaglog import cloudlog
import selfdrive.messaging as messaging
import uploader
from logger import Logger
from selfdrive.loggerd.config import ROOT, SEGMENT_LENGTH
def gen_init_data(gctx):
  """Build the serialized initData log message.

  Captures the kernel command line, the JSON-encoded global context, and
  (when set) the DONGLE_ID environment variable.

  Returns the message as bytes, ready to be written as the log header.
  """
  msg = messaging.new_message()
  # Use a context manager so the /proc/cmdline handle is closed promptly
  # instead of leaking until garbage collection.
  with open("/proc/cmdline", "r") as cmdline_file:
    kernel_args = cmdline_file.read().strip().split(" ")
  msg.initData.kernelArgs = kernel_args

  msg.initData.gctx = json.dumps(gctx)

  if os.getenv('DONGLE_ID'):
    msg.initData.dongleId = os.getenv('DONGLE_ID')
  return msg.to_bytes()
def main(gctx=None):
  """loggerd event loop: record published messages into segmented logs.

  Subscribes to every loggable service, writes each raw incoming message to
  disk via Logger, and rotates to a new segment every SEGMENT_LENGTH
  seconds, notifying visiond of every rotation so video recordings stay in
  sync with the log segments.
  """
  logger = Logger(ROOT, gen_init_data(gctx))

  context = zmq.Context()
  poller = zmq.Poller()

  # we push messages to visiond to rotate image recordings
  vision_control_sock = context.socket(zmq.PUSH)
  vision_control_sock.connect("tcp://127.0.0.1:8001")

  # register listeners for all services
  for service in service_list.itervalues():
    if service.should_log and service.port is not None:
      messaging.sub_sock(context, service.port, poller)

  # Drop stale upload locks left behind by a previous run.
  uploader.clear_locks(ROOT)

  cur_dir, cur_part = logger.start()
  try:
    cloudlog.info("starting in dir %r", cur_dir)

    # Tell visiond about the initial segment.
    rotate_msg = messaging.log.LogRotate.new_message()
    rotate_msg.segmentNum = cur_part
    rotate_msg.path = cur_dir
    vision_control_sock.send(rotate_msg.to_bytes())

    last_rotate = realtime.sec_since_boot()
    while True:
      # 1s poll timeout so rotation is checked even when no data arrives.
      polld = poller.poll(timeout=1000)
      for sock, mode in polld:
        if mode != zmq.POLLIN:
          continue
        dat = sock.recv()

        # print "got", len(dat), realtime.sec_since_boot()
        # logevent = log_capnp.Event.from_bytes(dat)
        # print str(logevent)

        logger.log_data(dat)

      t = realtime.sec_since_boot()
      if (t - last_rotate) > SEGMENT_LENGTH:
        # Advance by exactly one segment length (not to `t`) so the rotation
        # cadence does not drift when a poll cycle runs long.
        last_rotate += SEGMENT_LENGTH
        cur_dir, cur_part = logger.rotate()
        cloudlog.info("rotated to %r", cur_dir)

        rotate_msg = messaging.log.LogRotate.new_message()
        rotate_msg.segmentNum = cur_part
        rotate_msg.path = cur_dir
        vision_control_sock.send(rotate_msg.to_bytes())
  finally:
    cloudlog.info("loggerd exiting...")

    # tell visiond to stop logging
    rotate_msg = messaging.log.LogRotate.new_message()
    rotate_msg.segmentNum = -1
    rotate_msg.path = "/dev/null"
    vision_control_sock.send(rotate_msg.to_bytes())

    # stop logging
    logger.stop()
# Run the logger loop when invoked as a script.
if __name__ == "__main__":
  main()
|
/*
net/netlib.h
*/
#ifndef _NET__NETLIB_H_
#define _NET__NETLIB_H_

#ifndef _ANSI
#include <ansi.h>
#endif

/* BSD r-command helpers: iruserok() validates that a remote user may act as
 * a local user; rcmd() opens a connection to a remote command server. */
_PROTOTYPE (int iruserok, (unsigned long raddr, int superuser,
			const char *ruser, const char *luser) );
_PROTOTYPE (int rcmd, (char **ahost, int rport, const char *locuser,
			const char *remuser, const char *cmd, int *fd2p) );

/* Device nodes through which the network stack is accessed. */
#define IPSTAT_DEVICE	"/dev/ipstat"
#define ETH_DEVICE	"/dev/eth"
#define IP_DEVICE	"/dev/ip"
#define TCP_DEVICE	"/dev/tcp"
#define UDP_DEVICE	"/dev/udp"

#endif /* _NET__NETLIB_H_ */
|
/* SPDX-License-Identifier: GPL-2.0 WITH Linux-syscall-note */
/*
** asm/setup.h -- Definition of the Linux/m68k setup information
**
** Copyright 1992 by Greg Harp
**
** This file is subject to the terms and conditions of the GNU General Public
** License. See the file COPYING in the main directory of this archive
** for more details.
*/
#ifndef _UAPI_M68K_SETUP_H
#define _UAPI_M68K_SETUP_H

/* Maximum length, in bytes, of the kernel command line on m68k. */
#define COMMAND_LINE_SIZE 256

#endif /* _UAPI_M68K_SETUP_H */
|
# -*- coding: utf-8 -*-
"""
A list of Romanian counties as `choices` in a formfield.
This exists as a standalone file so that it's only imported into memory when
explicitly needed.
"""
# (code, localized name) pairs for every Romanian county, plus Bucharest ('B').
# NOTE(review): the names use cedilla diacritics (ş/ţ) rather than the
# official comma-below glyphs (ș/ț) — presumably for legacy font/encoding
# compatibility; confirm before normalizing.
COUNTIES_CHOICES = (
    ('AB', u'Alba'),
    ('AR', u'Arad'),
    ('AG', u'Argeş'),
    ('BC', u'Bacău'),
    ('BH', u'Bihor'),
    ('BN', u'Bistriţa-Năsăud'),
    ('BT', u'Botoşani'),
    ('BV', u'Braşov'),
    ('BR', u'Brăila'),
    ('B', u'Bucureşti'),
    ('BZ', u'Buzău'),
    ('CS', u'Caraş-Severin'),
    ('CL', u'Călăraşi'),
    ('CJ', u'Cluj'),
    ('CT', u'Constanţa'),
    ('CV', u'Covasna'),
    ('DB', u'Dâmboviţa'),
    ('DJ', u'Dolj'),
    ('GL', u'Galaţi'),
    ('GR', u'Giurgiu'),
    ('GJ', u'Gorj'),
    ('HR', u'Harghita'),
    ('HD', u'Hunedoara'),
    ('IL', u'Ialomiţa'),
    ('IS', u'Iaşi'),
    ('IF', u'Ilfov'),
    ('MM', u'Maramureş'),
    ('MH', u'Mehedinţi'),
    ('MS', u'Mureş'),
    ('NT', u'Neamţ'),
    ('OT', u'Olt'),
    ('PH', u'Prahova'),
    ('SM', u'Satu Mare'),
    ('SJ', u'Sălaj'),
    ('SB', u'Sibiu'),
    ('SV', u'Suceava'),
    ('TR', u'Teleorman'),
    ('TM', u'Timiş'),
    ('TL', u'Tulcea'),
    ('VS', u'Vaslui'),
    ('VL', u'Vâlcea'),
    ('VN', u'Vrancea'),
)
|
import os
from os.path import exists
from os.path import join
import warnings
import numpy as np
from sklearn.utils import IS_PYPY
from sklearn.utils.testing import SkipTest
from sklearn.utils.testing import check_skip_network
from sklearn.datasets import get_data_home
from sklearn.datasets.base import _pkl_filepath
from sklearn.datasets.twenty_newsgroups import CACHE_NAME
from sklearn.utils.testing import install_mldata_mock
from sklearn.utils.testing import uninstall_mldata_mock
def setup_labeled_faces():
    """Skip the LFW doctests unless the dataset is already on disk."""
    lfw_home = join(get_data_home(), 'lfw_home')
    if not exists(lfw_home):
        raise SkipTest("Skipping dataset loading doctests")
def setup_mldata():
    """Install a urllib2 mock so doctests never download from mldata.org."""
    mock_datasets = {
        'mnist-original': {
            'data': np.empty((70000, 784)),
            'label': np.repeat(np.arange(10, dtype='d'), 7000),
        },
        'iris': {
            'data': np.empty((150, 4)),
        },
        'datasets-uci-iris': {
            'double0': np.empty((150, 4)),
            'class': np.empty((150,)),
        },
    }
    install_mldata_mock(mock_datasets)
def teardown_mldata():
    # Remove the mldata mock installed by setup_mldata().
    uninstall_mldata_mock()
def setup_rcv1():
    """Skip the RCV1 doctests when offline or when the dataset is missing."""
    check_skip_network()
    # skip the test in rcv1.rst if the dataset is not already loaded
    if not exists(join(get_data_home(), "RCV1")):
        raise SkipTest("Download RCV1 dataset to run this test.")
def setup_twenty_newsgroups():
    """Skip the 20-newsgroups doctests when the cache is not downloaded."""
    # The original bound get_data_home() to an unused local and called it a
    # second time here; one call is enough.
    cache_path = _pkl_filepath(get_data_home(), CACHE_NAME)
    if not exists(cache_path):
        raise SkipTest("Skipping dataset loading doctests")
def setup_working_with_text_data():
    """Skip the text tutorial on PyPy CI, offline, or without the cache."""
    if IS_PYPY and os.environ.get('CI', None):
        raise SkipTest('Skipping too slow test with PyPy on CI')
    check_skip_network()
    if not exists(_pkl_filepath(get_data_home(), CACHE_NAME)):
        raise SkipTest("Skipping dataset loading doctests")
def setup_compose():
    # compose.rst doctests require pandas; skip them if it is not installed.
    try:
        import pandas  # noqa
    except ImportError:
        raise SkipTest("Skipping compose.rst, pandas not installed")
def setup_impute():
    # impute.rst doctests require pandas; skip them if it is not installed.
    try:
        import pandas  # noqa
    except ImportError:
        raise SkipTest("Skipping impute.rst, pandas not installed")
def setup_unsupervised_learning():
    # ignore deprecation warnings from scipy.misc.face
    warnings.filterwarnings('ignore', 'The binary mode of fromstring',
                            DeprecationWarning)
def pytest_runtest_setup(item):
    """Run the per-file setup matching the .rst file about to be doctested.

    NOTE(review): because this is an elif chain, the datasets index page
    (`is_index`) only triggers the FIRST matching branch (labeled_faces),
    not every dataset setup — confirm that is intended.
    """
    fname = item.fspath.strpath
    is_index = fname.endswith('datasets/index.rst')
    if fname.endswith('datasets/labeled_faces.rst') or is_index:
        setup_labeled_faces()
    elif fname.endswith('datasets/mldata.rst') or is_index:
        setup_mldata()
    elif fname.endswith('datasets/rcv1.rst') or is_index:
        setup_rcv1()
    elif fname.endswith('datasets/twenty_newsgroups.rst') or is_index:
        setup_twenty_newsgroups()
    elif fname.endswith('tutorial/text_analytics/working_with_text_data.rst')\
            or is_index:
        setup_working_with_text_data()
    elif fname.endswith('modules/compose.rst') or is_index:
        setup_compose()
    elif IS_PYPY and fname.endswith('modules/feature_extraction.rst'):
        raise SkipTest('FeatureHasher is not compatible with PyPy')
    elif fname.endswith('modules/impute.rst'):
        setup_impute()
    elif fname.endswith('statistical_inference/unsupervised_learning.rst'):
        setup_unsupervised_learning()
def pytest_runtest_teardown(item):
    """Undo per-file setup; only the mldata mock needs global cleanup."""
    if item.fspath.strpath.endswith('datasets/mldata.rst'):
        teardown_mldata()
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for out-of-memory conditions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.compiler.tests import xla_test
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.platform import googletest
class OutOfMemoryTest(xla_test.XLATestCase):
    """Verifies device OOM surfaces as a catchable ResourceExhaustedError."""

    def testOutputOutOfMemory(self):
        """Allocates tensors until out of memory.

        Generates a large rank-1 tensor. The tensor is an output of an XLA
        computation, not constant.

        Check that a ResourceExhaustedError is raised and can be caught.

        We spin in a loop generating larger and larger tensors until an OOM event
        happens. We may be running sandboxed, so have a small host memory limit, so
        any hardcoded value is unlikely to land in the sweet spot between device
        memory size and host memory size with stability.
        """

        def test_loop():
            # Start at ~2e8 elements and double until the device runs out.
            size = int(2e8)
            while True:
                with self.test_session():
                    # Force the compiled code to not be constant by feeding in a
                    # parameter.
                    p = array_ops.placeholder(dtypes.float32, shape=[2, 1, 1])
                    with self.test_scope():
                        # Create a computation that produces a large R1 tensor as an
                        # intermediate result.  Reduce it down so that if this file was
                        # compiled without --config=cuda, we don't force a D2H copy of a
                        # large tensor and potentially OOM the host.
                        #
                        # This is a bit tricky because XLA:GPU doesn't currently support RNG
                        # ops.  Here we rely on the fact that XLA doesn't do algebraic
                        # simplifications on conv(<ones>, <filter>).
                        c = math_ops.reduce_sum(
                            nn_ops.convolution(
                                array_ops.ones([1, size, 1]),
                                p,
                                padding='SAME',
                                data_format='NWC'))

                    c.eval(feed_dict={p: [[[1.0]], [[2.0]]]})
                    size *= 2

        # The loop must terminate via the expected exception type.
        self.assertRaises(errors.ResourceExhaustedError, test_loop)
# Run the XLA OOM test when executed directly.
if __name__ == '__main__':
  googletest.main()
|
#include <winpr/crt.h>
#include <winpr/pool.h>
#include <winpr/interlocked.h>
/* Number of completed callback invocations, shared across pool threads. */
static LONG count = 0;

/* Work callback: prints a greeting with an atomically incremented counter,
 * then performs dummy memory churn so the pool threads have real work.
 * NOTE(review): `context` is a void* printed via %s — this relies on the
 * submitter always passing a NUL-terminated string. */
void CALLBACK test_WorkCallback(PTP_CALLBACK_INSTANCE instance, void* context, PTP_WORK work)
{
	int index;
	BYTE a[1024];
	BYTE b[1024];
	BYTE c[1024];

	printf("Hello %s: %d (thread: %d)\n", context,
			InterlockedIncrement(&count), GetCurrentThreadId());

	/* Busy-work: fill and copy buffers; `c` ends up as a copy of `b`. */
	for (index = 0; index < 100; index++)
	{
		ZeroMemory(a, 1024);
		ZeroMemory(b, 1024);
		ZeroMemory(c, 1024);

		FillMemory(a, 1024, 0xAA);
		FillMemory(b, 1024, 0xBB);

		CopyMemory(c, a, 1024);
		CopyMemory(c, b, 1024);
	}
}
/* Exercises thread pool work objects: first against the global (default)
 * pool, then against a private pool with a cleanup group.
 * Returns 0 on success, -1 on failure. */
int TestPoolWork(int argc, char* argv[])
{
	int index;
	PTP_POOL pool;
	PTP_WORK work;
	PTP_CLEANUP_GROUP cleanupGroup;
	TP_CALLBACK_ENVIRON environment;

	printf("Global Thread Pool\n");

	work = CreateThreadpoolWork((PTP_WORK_CALLBACK) test_WorkCallback, "world", NULL);

	if (!work)
	{
		printf("CreateThreadpoolWork failure\n");
		return -1;
	}

	/**
	 * You can post a work object one or more times (up to MAXULONG) without waiting for prior callbacks to complete.
	 * The callbacks will execute in parallel. To improve efficiency, the thread pool may throttle the threads.
	 */

	for (index = 0; index < 10; index++)
		SubmitThreadpoolWork(work);

	WaitForThreadpoolWorkCallbacks(work, FALSE);
	CloseThreadpoolWork(work);

	printf("Private Thread Pool\n");

	pool = CreateThreadpool(NULL);

	/* Check the allocation before configuring the pool. */
	if (!pool)
	{
		printf("CreateThreadpool failure\n");
		return -1;
	}

	SetThreadpoolThreadMinimum(pool, 4);
	SetThreadpoolThreadMaximum(pool, 8);

	InitializeThreadpoolEnvironment(&environment);
	SetThreadpoolCallbackPool(&environment, pool);

	cleanupGroup = CreateThreadpoolCleanupGroup();

	if (!cleanupGroup)
	{
		printf("CreateThreadpoolCleanupGroup failure\n");
		return -1;
	}

	SetThreadpoolCallbackCleanupGroup(&environment, cleanupGroup, NULL);

	work = CreateThreadpoolWork((PTP_WORK_CALLBACK) test_WorkCallback, "world", &environment);

	if (!work)
	{
		printf("CreateThreadpoolWork failure\n");
		return -1;
	}

	for (index = 0; index < 10; index++)
		SubmitThreadpoolWork(work);

	WaitForThreadpoolWorkCallbacks(work, FALSE);

	/* CloseThreadpoolCleanupGroupMembers releases every object that belongs
	 * to the cleanup group, including `work`; closing it again afterwards
	 * (as the original code did) is a double release per the Win32 API
	 * contract. */
	CloseThreadpoolCleanupGroupMembers(cleanupGroup, TRUE, NULL);

	CloseThreadpoolCleanupGroup(cleanupGroup);
	DestroyThreadpoolEnvironment(&environment);
	CloseThreadpool(pool);

	return 0;
}
|
# -*- coding: utf-8 -*-
'''
Created on 16.04.15
@author = mharder
'''
import six
from bonfire.formats import tail_format, dump_format
from bonfire.graylog_api import Message
import arrow
def test_dump_format():
    """dump_format renders the timestamp plus quoted values of the chosen fields."""
    ts = arrow.get()
    ts_str = ts.to('local').format("YYYY-MM-DD HH:mm:ss.SS")

    formatter = dump_format(["a", "b", "c"])

    # (message payload, expected template) — extra keys are ignored,
    # missing keys render as empty strings.
    cases = [
        ({}, "{};'';'';''"),
        ({"a": "d"}, "{};'d';'';''"),
        ({"a": "d", "b": "e", "c": "f"}, "{};'d';'e';'f'"),
        ({"a": "d", "b": "e", "c": "f", "g": "h"}, "{};'d';'e';'f'"),
    ]
    for payload, template in cases:
        msg = Message({"timestamp": ts, "message": payload})
        assert formatter(msg) == template.format(ts_str)
def test_tail_format():
    """tail_format works with and without color, and with/without 'message'."""
    field_sets = (
        ["source", "facility", "line", "module"],
        ["source", "facility", "line", "module", "message"],
    )
    for fields in field_sets:
        run_tail_test_with_formatter_wc(tail_format(fields))
        run_tail_test_with_formatter(tail_format(fields, color=False))
def run_tail_test_with_formatter_wc(formatter):
    """Check a colorized tail formatter across all supported syslog levels."""
    ts = arrow.get()
    ts_str = ts.to('local').format("YYYY-MM-DD HH:mm:ss.SS")

    message = {
        "message": "Hällo Wörld, Здравствулте мир, γειά σου κόσμος",
        "source": "a",
        "level": 2,
        "facility": "b",
        "line": 10,
        "module": "c",
        "timestamp": ts
    }

    # (level, expected ANSI-colored template); names are padded to 8 chars.
    expected_by_level = [
        (2, six.u("\x1b[41m\x1b[37mCRITICAL[{}] Hällo Wörld, Здравствулте мир, γειά σου κόσμος # source:a; facility:b; line:10; module:c\x1b[0m")),
        (3, six.u("\x1b[31mERROR   [{}] Hällo Wörld, Здравствулте мир, γειά σου κόσμος # source:a; facility:b; line:10; module:c\x1b[0m")),
        (4, six.u("\x1b[33mWARNING [{}] Hällo Wörld, Здравствулте мир, γειά σου κόσμος # source:a; facility:b; line:10; module:c\x1b[0m")),
        (5, six.u("\x1b[32mNOTICE  [{}] Hällo Wörld, Здравствулте мир, γειά σου κόσμος # source:a; facility:b; line:10; module:c\x1b[0m")),
        (6, six.u("\x1b[32mINFO    [{}] Hällo Wörld, Здравствулте мир, γειά σου κόσμος # source:a; facility:b; line:10; module:c\x1b[0m")),
        (7, six.u("\x1b[34mDEBUG   [{}] Hällo Wörld, Здравствулте мир, γειά σου κόσμος # source:a; facility:b; line:10; module:c\x1b[0m")),
    ]

    for level, template in expected_by_level:
        message["level"] = level
        assert formatter(Message({"message": message})) == template.format(ts_str)
def run_tail_test_with_formatter(formatter):
    """Check an uncolored tail formatter across all supported syslog levels."""
    ts = arrow.get()
    ts_str = ts.to('local').format("YYYY-MM-DD HH:mm:ss.SS")

    message = {
        "message": "Hallo World",
        "source": "a",
        "level": 2,
        "facility": "b",
        "line": 10,
        "module": "c",
        "timestamp": ts
    }

    # (level, expected plain-text template); names are padded to 8 chars.
    expected_by_level = [
        (2, "CRITICAL[{}] Hallo World # source:a; facility:b; line:10; module:c"),
        (3, "ERROR   [{}] Hallo World # source:a; facility:b; line:10; module:c"),
        (4, "WARNING [{}] Hallo World # source:a; facility:b; line:10; module:c"),
        (5, "NOTICE  [{}] Hallo World # source:a; facility:b; line:10; module:c"),
        (6, "INFO    [{}] Hallo World # source:a; facility:b; line:10; module:c"),
        (7, "DEBUG   [{}] Hallo World # source:a; facility:b; line:10; module:c"),
    ]

    for level, template in expected_by_level:
        message["level"] = level
        assert formatter(Message({"message": message})) == template.format(ts_str)
|
/*
* nt loader
*
* Copyright 2006-2008 Mike McCormack
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
*/
#ifndef __NTNATIVE_PTRACE_BASE_H
#define __NTNATIVE_PTRACE_BASE_H
#include "config.h"
// Base address-space implementation that drives a child process via
// ptrace(2). Concrete subclasses supply the child's pid and userspace
// segment selectors; this class provides context transfer, stepping with
// timeouts, and fault reporting.
class ptrace_address_space_impl: public address_space_impl
{
protected:
	// Instance targeted by the interval-timer signal handler.
	static ptrace_address_space_impl *sig_target;
	static void cancel_timer();
	static void sigitimer_handler(int signal);
	// Copy the child's register state out of / into `ctx`.
	int get_context( PCONTEXT ctx );
	int set_context( PCONTEXT ctx );
	// Resume the child (optionally single-stepping) until it stops or the
	// timeout elapses.
	int ptrace_run( PCONTEXT ctx, int single_step, LARGE_INTEGER& timeout );
	virtual pid_t get_child_pid() = 0;
	virtual void handle( int signal );
	virtual void run( void *TebBaseAddress, PCONTEXT ctx, int single_step, LARGE_INTEGER& timeout, execution_context_t *exec );
	virtual void alarm_timeout(LARGE_INTEGER& timeout);
	virtual int set_userspace_fs(void *TebBaseAddress, ULONG fs);
	virtual void init_context( CONTEXT& ctx );
	virtual unsigned short get_userspace_fs() = 0;
	virtual unsigned short get_userspace_data_seg();
	virtual unsigned short get_userspace_code_seg();
	// Retrieve the faulting address after a memory-access signal.
	virtual int get_fault_info( void *& addr );
	// Block until `pid` stops with `signal`.
	void wait_for_signal( pid_t pid, int signal );
public:
	// Install the process-wide signal handlers used by the timer machinery.
	static void set_signals();
};
#endif // __NTNATIVE_PTRACE_BASE_H
|
package com.swifts.frame.modules.wx.fastweixin.api;
import com.swifts.frame.modules.wx.fastweixin.api.config.ApiConfig;
import com.swifts.frame.modules.wx.fastweixin.api.entity.Article;
import com.swifts.frame.modules.wx.fastweixin.api.enums.MediaType;
import com.swifts.frame.modules.wx.fastweixin.util.JSONUtil;
import com.swifts.frame.modules.wx.fastweixin.util.NetWorkCenter;
import com.swifts.frame.modules.wx.fastweixin.util.StreamUtil;
import com.swifts.frame.modules.wx.fastweixin.api.response.BaseResponse;
import com.swifts.frame.modules.wx.fastweixin.api.response.DownloadMediaResponse;
import com.swifts.frame.modules.wx.fastweixin.api.response.UploadImgResponse;
import com.swifts.frame.modules.wx.fastweixin.api.response.UploadMediaResponse;
import org.apache.http.Header;
import org.apache.http.HttpStatus;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* 多媒体资源API
*
* @author peiyu
*/
/**
 * Multimedia resource API: upload and download of media against the WeChat
 * (Weixin) media endpoints.
 *
 * @author peiyu
 */
public class MediaAPI extends BaseAPI {

    private static final Logger LOG = LoggerFactory.getLogger(MediaAPI.class);

    /**
     * @param config API configuration holding the access token.
     */
    public MediaAPI(ApiConfig config) {
        super(config);
    }

    /**
     * Uploads a temporary media resource; WeChat keeps it for 3 days and then
     * deletes it.
     *
     * @param type resource type
     * @param file the file to upload
     * @return response object
     */
    public UploadMediaResponse uploadMedia(MediaType type, File file) {
        UploadMediaResponse response;
        // The '#' placeholder is substituted with the access token downstream.
        String url = "http://file.api.weixin.qq.com/cgi-bin/media/upload?access_token=#&type=" + type.toString();
        BaseResponse r = executePost(url, null, file);
        // The raw response body is carried in errmsg and decoded here.
        response = JSONUtil.toBean(r.getErrmsg(), UploadMediaResponse.class);
        return response;
    }

    /**
     * Uploads article material for mass (broadcast) messages.
     *
     * @param articles the articles to upload
     * @return response object
     */
    public UploadMediaResponse uploadNews(List<Article> articles){
        UploadMediaResponse response;
        String url = BASE_API_URL + "cgi-bin/media/uploadnews?access_token=#";
        final Map<String, Object> params = new HashMap<String, Object>();
        params.put("articles", articles);
        BaseResponse r = executePost(url, JSONUtil.toJson(params));
        response = JSONUtil.toBean(r.getErrmsg(), UploadMediaResponse.class);
        return response;
    }

    /**
     * Uploads an image used in mass (broadcast) messages.
     */
    public UploadImgResponse uploadImg(File file){
        UploadImgResponse response;
        String url = "https://api.weixin.qq.com/cgi-bin/media/uploadimg?access_token=#";
        BaseResponse r = executePost(url, null, file);
        response = JSONUtil.toBean(r.getErrmsg(), UploadImgResponse.class);
        return response;
    }

    /**
     * Downloads a media resource. (Original author note: implementation is
     * rough but working; to be cleaned up later.)
     *
     * NOTE(review): when a Content-disposition header is present, the raw
     * response stream is handed to DownloadMediaResponse without being closed
     * here — ownership presumably transfers to the caller; confirm the caller
     * closes it before changing this to try-with-resources.
     *
     * @param mediaId WeChat's unique identifier for the resource
     * @return response object
     */
    public DownloadMediaResponse downloadMedia(String mediaId) {
        DownloadMediaResponse response = new DownloadMediaResponse();
        String url = "http://file.api.weixin.qq.com/cgi-bin/media/get?access_token=" + this.config.getAccessToken() + "&media_id=" + mediaId;
        RequestConfig config = RequestConfig.custom().setConnectionRequestTimeout(NetWorkCenter.CONNECT_TIMEOUT).setConnectTimeout(NetWorkCenter.CONNECT_TIMEOUT).setSocketTimeout(NetWorkCenter.CONNECT_TIMEOUT).build();
        CloseableHttpClient client = HttpClientBuilder.create().setDefaultRequestConfig(config).build();
        HttpGet get = new HttpGet(url);
        try {
            CloseableHttpResponse r = client.execute(get);
            if (HttpStatus.SC_OK == r.getStatusLine().getStatusCode()) {
                InputStream inputStream = r.getEntity().getContent();
                Header[] headers = r.getHeaders("Content-disposition");
                if (null != headers && 0 != headers.length) {
                    // A file attachment: expose the stream and file name.
                    Header length = r.getHeaders("Content-Length")[0];
                    response.setContent(inputStream, Integer.valueOf(length.getValue()));
                    response.setFileName(headers[0].getElements()[0].getParameterByName("filename").getValue());
                } else {
                    // No attachment header: the body is a JSON error payload.
                    ByteArrayOutputStream out = new ByteArrayOutputStream();
                    StreamUtil.copy(inputStream, out);
                    String json = out.toString();
                    response = JSONUtil.toBean(json, DownloadMediaResponse.class);
                }
            }
        } catch (IOException e) {
            LOG.error("IO处理异常", e);
        } finally {
            try {
                client.close();
            } catch (IOException e) {
                LOG.error("异常", e);
            }
        }
        return response;
    }
}
|
// Copyright 2020 Las Venturas Playground. All rights reserved.
// Use of this source code is governed by the MIT license, a copy of which can
// be found in the LICENSE file.
import { Feature } from 'components/feature_manager/feature.js';
import { MockRaceDatabase } from 'features/races/mock_race_database.js';
import { RaceCommands } from 'features/races/race_commands.js';
import { RaceDatabase } from 'features/races/race_database.js';
import { RaceDescription } from 'features/races/race_description.js';
import { RaceGame } from 'features/races/race_game.js';
import { Setting } from 'entities/setting.js';
import { VehicleGameRegistry } from 'features/games_vehicles/vehicle_game_registry.js';
// Directory in which each of the race configuration files have been defined.
const kRaceDirectory = 'data/races/';
// The Races feature is responsible for providing the race interface on the server. It builds on top
// of the Games API, for ensuring consistent behaviour of games on the server.
export default class Races extends Feature {
commands_ = null;   // RaceCommands instance providing the player-facing commands
database_ = null;   // RaceDatabase (or mock) storing high scores and rankings
games_ = null;      // dependency handle on the Games Vehicles feature
registry_ = null;   // VehicleGameRegistry tracking the available races
constructor() {
    super();

    // The Races feature depends on the Games Vehicles API for providing its functionality.
    this.games_ = this.defineDependency('games_vehicles');

    // Re-register the race game whenever the Games Vehicles feature reloads.
    this.games_.addReloadObserver(this, () => this.registerGame());

    // The database, in which high scores and checkpoint data will be stored. Will also be used
    // to determine the popularity ranking for games, which the command menu will use.
    this.database_ = server.isTest() ? new MockRaceDatabase()
                                     : new RaceDatabase();

    // The registry is responsible for keeping tabs on the available races.
    this.registry_ = new VehicleGameRegistry('race', kRaceDirectory, RaceDescription);

    // Provides the commands through which players can interact with the race system.
    this.commands_ = new RaceCommands(this.database_, this.games_, this.registry_);

    // Immediately register the RaceGame so that the Games API knows of its existence.
    this.registerGame();
}
// ---------------------------------------------------------------------------------------------
// Registers the RaceGame with the Games API as a game that can be started by players. The entry
// point will continue to be the "/race" command.
registerGame() {
this.games_().registerGame(RaceGame, {
name: Races.prototype.generateRaceName.bind(this),
commandFn: Races.prototype.generateRaceCommand.bind(this),
goal: 'Complete the race track in the shortest possible time.',
scoreType: 'time',
minimumPlayers: 1,
maximumPlayers: 4,
price: 0,
settings: [
// Option: Game Description ID (number)
new Setting('game', 'description_id', Setting.TYPE_NUMBER, -1, 'Description ID'),
],
}, { database: this.database_, registry: this.registry_ });
}
// Generates the command through which a particular race can be started, information which will
// be conveyed through the |settings| argument. NULL when absent.
generateRaceCommand(settings) {
const description = this.registry_.getDescription(settings.get('game/description_id'));
return description ? `race ${description.id}`
: null;
}
// Generates the name for the derby described by the given |settings|. It depends on the game's
// ID that's contained within the |settings|, which should be known to the registry.
generateRaceName(settings) {
const description = this.registry_.getDescription(settings.get('game/description_id'));
return description ? description.name
: 'Race';
}
// ---------------------------------------------------------------------------------------------
// The races feature does not define a public API.
// ---------------------------------------------------------------------------------------------
dispose() {
this.games_().removeGame(RaceGame);
this.games_.removeReloadObserver(this);
this.games_ = null;
this.commands_.dispose();
this.commands_ = null;
this.registry_.dispose();
this.registry_ = null;
this.database_ = null;
}
}
|
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core.services import luci_auth
from core.services import request
SERVICE_URL = 'https://pinpoint-dot-chromeperf.appspot.com/api'
def Request(endpoint, **kwargs):
    """Send a request to some pinpoint endpoint.

    Authentication and a JSON accept header are enabled unless the caller
    explicitly overrides them.
    """
    options = dict(kwargs)
    options.setdefault('use_auth', True)
    options.setdefault('accept', 'json')
    return request.Request(SERVICE_URL + endpoint, **options)
def Job(job_id, with_state=False, with_tags=False):
    """Get job information from its id.

    Args:
      job_id: identifier of the pinpoint job.
      with_state: also request the job's STATE data.
      with_tags: also request the job's TAGS data.
    """
    params = [('o', option)
              for flag, option in ((with_state, 'STATE'), (with_tags, 'TAGS'))
              if flag]
    return Request('/job/%s' % job_id, params=params)
def Jobs():
    """List jobs for the authenticated user."""
    endpoint = '/jobs'
    return Request(endpoint)
def NewJob(**kwargs):
    """Create a new pinpoint job via the /new endpoint.

    The 'user' field defaults to the authenticated LUCI user; the email
    lookup is only performed when the caller did not supply one.
    """
    if 'user' not in kwargs:
        kwargs['user'] = luci_auth.GetUserEmail()
    return Request('/new', method='POST', data=kwargs)
|
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import MuiThemeProvider from 'material-ui/styles/MuiThemeProvider';
import injectTapEventPlugin from 'react-tap-event-plugin';
import Toolbar from './Toolbar';
import GameDialog from './GameDialog';
import GameList from './GameList';
injectTapEventPlugin();
class App extends Component {
render() {
return (
<MuiThemeProvider>
<div className='app-container'>
<Toolbar />
<GameDialog />
<GameList />
<footer className='app-footer'>
Cerebral Game Library © 2017
</footer>
</div>
</MuiThemeProvider>
);
}
}
App.propTypes = {};
export default App;
|
import os
import sys
import csv
import json
OUTPUT_FORMATS = ('csv', 'json', 'yara', 'netflow', )
def getHandler(output_format):
    """Return an OutputHandler_* instance for the requested output format.

    Unknown formats fall back to CSV with a warning.
    """
    fmt = output_format.lower()
    if fmt not in OUTPUT_FORMATS:
        print("[WARNING] Invalid output format specified.. using CSV")
        fmt = 'csv'
    # The concrete handler classes live in this module, named by convention.
    handler_class = getattr(sys.modules[__name__], "OutputHandler_" + fmt)
    return handler_class()
class OutputHandler(object):
    """Base interface for match/error reporting backends.

    Subclasses override the hooks they care about; by default every hook is
    a no-op except print_error, which reports the exception on stdout.
    """

    def print_match(self, fpath, page, name, match):
        """Called once per IOC match found in fpath."""

    def print_header(self, fpath):
        """Called before the first match of a file."""

    def print_footer(self, fpath):
        """Called after the last match of a file."""

    def print_error(self, fpath, exception):
        print("[ERROR] %s" % (exception))
class OutputHandler_csv(OutputHandler):
    """Writes each match (or error) to stdout as one tab-separated row."""

    def __init__(self):
        # Tab-delimited so matches containing commas survive untouched.
        self.csv_writer = csv.writer(sys.stdout, delimiter='\t')

    def print_match(self, fpath, page, name, match):
        self.csv_writer.writerow([fpath, page, name, match])

    def print_error(self, fpath, exception):
        # Errors reuse the match row shape: page 0, type 'error'.
        self.csv_writer.writerow([fpath, '0', 'error', exception])
class OutputHandler_json(OutputHandler):
    """Emits one JSON object per line for every match or error."""

    def print_match(self, fpath, page, name, match):
        record = {
            'path': fpath,
            'file': os.path.basename(fpath),
            'page': page,
            'type': name,
            'match': match,
        }
        print(json.dumps(record))

    def print_error(self, fpath, exception):
        record = {
            'path': fpath,
            'file': os.path.basename(fpath),
            'type': 'error',
            'exception': exception,
        }
        print(json.dumps(record))
class OutputHandler_yara(OutputHandler):
    """Renders the matches of each input file as one YARA rule.

    print_header() opens the rule and resets the per-file state,
    print_match() emits one string definition per match, and
    print_footer() closes the rule with a condition OR-ing all strings.
    """

    def __init__(self):
        # Translation table mapping every byte that is not alphanumeric to
        # '_', so arbitrary file names become valid YARA rule identifiers.
        self.rule_enc = ''.join(
            chr(c) if chr(c).isupper() or chr(c).islower() or chr(c).isdigit() else '_'
            for c in range(256))
        # Per-file state. Also initialised here (not only in print_header)
        # so a stray print_match() before print_header() cannot raise
        # AttributeError.
        self.cnt = {}
        self.sids = []

    def print_match(self, fpath, page, name, match):
        # Per-type counter makes each string id ($name1, $name2, ...) unique.
        if name in self.cnt:
            self.cnt[name] += 1
        else:
            self.cnt[name] = 1
        string_id = "$%s%d" % (name, self.cnt[name])
        self.sids.append(string_id)
        # Backslashes must be escaped inside YARA string literals.
        string_value = match.replace('\\', '\\\\')
        print("\t\t%s = \"%s\"" % (string_id, string_value))

    def print_header(self, fpath):
        # Derive a legal rule identifier from the file's base name.
        rule_name = os.path.splitext(os.path.basename(fpath))[0].translate(self.rule_enc)
        print("rule %s" % (rule_name))
        print("{")
        print("\tstrings:")
        # Reset the per-file counters and string ids for the new rule.
        self.cnt = {}
        self.sids = []

    def print_footer(self, fpath):
        cond = ' or '.join(self.sids)
        print("\tcondition:")
        print("\t\t" + cond)
        print("}")
class OutputHandler_netflow(OutputHandler):
    """Builds a host-filter expression from the IP matches.

    The constructor prints a seed clause; every IP match is appended as an
    additional ' or host <ip>' clause. Uses the print() function for
    consistency with the rest of this module (the original used Python-2
    print statements; the single-argument form behaves identically).
    """

    def __init__(self):
        # Seed clause so the following clauses can all start with ' or'.
        print("host 255.255.255.255")

    def print_match(self, fpath, page, name, match):
        data = {
            'type': name,
            'match': match
        }
        # Only IP-type matches contribute to the filter expression.
        if data["type"] == "IP":
            print(" or host %s " % data["match"])
|
from datetime import datetime
import random
import semantics.simulator.urls as urls
def createDevice(id, url, platformId):
    """Build the JSON-LD representation of a device.

    NOTE(review): assumes urls.contextUrl and url end with '/' -- confirm.
    """
    return {
        "id": id,
        "@context": urls.contextUrl + "deviceContext.jsonld",
        "@type": "Device",
        "@id": url + "devices/" + id,
        "dev:hasPlatform": url + "platforms/" + platformId,
    }
def createPlatform(id, url, specific_sensor, specific_actuator):
    """Build the JSON-LD representation of a platform, linking to its
    sensor and actuator resource paths."""
    return {
        "id": id,
        "@context": urls.contextUrl + "platformContext.jsonld",
        "@type": "Platform",
        "@id": url + "platforms/" + id,
        "brand": "Plataforma " + id,
        "dev:hasSensor": url + specific_sensor,
        "dev:hasActuator": url + specific_actuator,
    }
def createMeasurement(url, deviceId, variableId, value):
    """Build the JSON-LD representation of one measurement of a variable
    taken by a device, timestamped with the current wall-clock time."""
    timestamp = str(datetime.now())
    # Pseudo-unique id: random prefix/suffix around the current microsecond.
    # NOTE(review): not collision-proof; uuid4 would be safer if ids must
    # be globally unique.
    id = "%d%d%d" % (random.randint(0, 10000),
                     datetime.now().microsecond,
                     random.randint(0, 10000))
    return {
        "id": id,
        "@context": urls.contextUrl + "measurementContext.jsonld",
        "@type": "Measurement",
        "@id": url + "measurements/" + id,
        "dev:wasMeasuredBy": url + "devices/" + deviceId,
        "dev:valueOf": url + "variables/" + variableId,
        "value": value,
        "timestamp": timestamp,
    }
|
<!DOCTYPE html>
<title>Reference for WebVTT rendering, cue text should be rerendered when overriding them using the DOM APIs while paused</title>
<style>
html { overflow:hidden }
body { margin:0 }
.video {
display: inline-block;
width: 320px;
height: 180px;
position: relative;
font-size: 9px;
}
.cue {
position: absolute;
bottom: 0;
left: 0;
right: 0;
text-align: center
}
.cue > span {
font-family: Ahem, sans-serif;
background: rgba(0,0,0,0.8);
color: green;
}
</style>
<div class="video"><span class="cue"><span>f o o</span></span></div>
|
package master.flame.danmaku.danmaku.model.android;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.text.Layout;
import android.text.Spannable;
import android.text.SpannableStringBuilder;
import android.text.Spanned;
import android.text.SpannedString;
import android.text.StaticLayout;
import android.text.TextPaint;
import java.lang.ref.SoftReference;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
/**
 * Cache stuffer that renders Spanned danmaku text through a StaticLayout so
 * that character-level spans (colour, size, images, ...) are preserved when
 * the danmaku is measured and drawn from the cache. Non-Spanned text falls
 * back to SimpleTextCacheStuffer's plain rendering.
 *
 * Created by ch on 15-7-16.
 */
public class SpannedCacheStuffer extends SimpleTextCacheStuffer {
    @Override
    public void measure(BaseDanmaku danmaku, TextPaint paint) {
        if (danmaku.text instanceof Spanned) {
            // Snapshot the text into a concrete spanned type so the cached
            // layout is independent of later mutations of danmaku.text.
            CharSequence text = null;
            if (danmaku.text instanceof SpannableStringBuilder) {
                text = new SpannableStringBuilder(danmaku.text);
            } else if (danmaku.text instanceof Spannable) {
                text = Spannable.Factory.getInstance().newSpannable(danmaku.text);
            } else if (danmaku.text instanceof SpannedString) {
                text = new SpannedString(danmaku.text);
            }
            if (text != null) {
                // Measure via StaticLayout and keep the layout in danmaku.obj
                // behind a SoftReference so drawText() can reuse it until the
                // VM reclaims the memory.
                StaticLayout staticLayout = new StaticLayout(text, paint, (int) StaticLayout.getDesiredWidth(danmaku.text, paint), Layout.Alignment.ALIGN_NORMAL, 1.0f, 0.0f, true);
                danmaku.paintWidth = staticLayout.getWidth();
                danmaku.paintHeight = staticLayout.getHeight();
                danmaku.obj = new SoftReference<StaticLayout>(staticLayout);
                return;
            }
        }
        // Not a Spanned (or no copy could be made): plain-text measurement.
        super.measure(danmaku, paint);
    }
    @Override
    public void drawStroke(BaseDanmaku danmaku, String lineText, Canvas canvas, float left, float top, Paint paint) {
        // When a StaticLayout is cached the drawing happens in drawText();
        // presumably that pass covers the stroke as well -- confirm.
        if (danmaku.obj == null) {
            super.drawStroke(danmaku, lineText, canvas, left, top, paint);
        }
    }
    @Override
    public void drawText(BaseDanmaku danmaku, String lineText, Canvas canvas, float left, float top, Paint paint) {
        if (danmaku.obj == null) {
            // No cached layout: draw as plain text.
            super.drawText(danmaku, lineText, canvas, left, top, paint);
            return;
        }
        SoftReference<StaticLayout> reference = (SoftReference<StaticLayout>) danmaku.obj;
        StaticLayout staticLayout = reference.get();
        if (staticLayout == null) {
            // The soft reference was collected; fall back to plain drawing.
            super.drawText(danmaku, lineText, canvas, left, top, paint);
            return;
        }
        boolean needRestore = false;
        // NOTE(review): the translation is skipped when either coordinate is
        // zero; `left != 0 || top != 0` looks like the intended condition --
        // confirm against upstream behaviour.
        if (left != 0 && top != 0) {
            canvas.save();
            canvas.translate(left, top + paint.ascent());
            needRestore = true;
        }
        staticLayout.draw(canvas);
        if (needRestore) {
            canvas.restore();
        }
    }
    @Override
    public void clearCaches() {
        super.clearCaches();
        // Suggest a GC so the SoftReference-held layouts are actually freed.
        System.gc();
    }
}
|
# -*- coding: utf-8 -*-
"""
Map tile acquisition
--------------------
Demonstrates cartopy's ability to draw map tiles which are downloaded on
demand from the Stamen tile server. Internally these tiles are then combined
into a single image and displayed in the cartopy GeoAxes.
"""
__tags__ = ["Scalar data"]
import matplotlib.pyplot as plt
from matplotlib.transforms import offset_copy
import cartopy.crs as ccrs
import cartopy.io.img_tiles as cimgt
def main():
    """Render Stamen terrain tiles around Eyjafjallajökull with a marker
    and an offset text label."""
    # Background tiles: Stamen terrain, downloaded on demand.
    stamen_terrain = cimgt.Stamen('terrain-background')

    # A GeoAxes in the projection native to the tiles.
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1, projection=stamen_terrain.crs)

    # Restrict the view to a small lon/lat window around the volcano.
    ax.set_extent([-22, -15, 63, 65], crs=ccrs.Geodetic())

    # Draw the terrain tiles at zoom level 8.
    ax.add_image(stamen_terrain, 8)

    # Mark the Eyjafjallajökull volcano itself.
    ax.plot(-19.613333, 63.62, marker='o', color='red', markersize=12,
            alpha=0.7, transform=ccrs.Geodetic())

    # Build a matplotlib transform for the Geodetic coordinate system and
    # shift it 25 pixels to the left for the label placement.
    geodetic_transform = ccrs.Geodetic()._as_mpl_transform(ax)
    text_transform = offset_copy(geodetic_transform, units='dots', x=-25)

    # Label the volcano, right-aligned so the text sits left of the marker.
    ax.text(-19.613333, 63.62, u'Eyjafjallajökull',
            verticalalignment='center', horizontalalignment='right',
            transform=text_transform,
            bbox=dict(facecolor='sandybrown', alpha=0.5, boxstyle='round'))

    plt.show()


if __name__ == '__main__':
    main()
|
<?xml version="1.0" encoding="iso-8859-1"?>
<!DOCTYPE html
PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html>
<head>
<title>show_rdoc_for_pattern (Gem::Server)</title>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1" />
<link rel="stylesheet" href="../../.././rdoc-style.css" type="text/css" media="screen" />
</head>
<body class="standalone-code">
<pre><span class="ruby-comment cmt"># File lib/rubygems/server.rb, line 726</span>
<span class="ruby-keyword kw">def</span> <span class="ruby-identifier">show_rdoc_for_pattern</span>(<span class="ruby-identifier">pattern</span>, <span class="ruby-identifier">res</span>)
<span class="ruby-identifier">found_gems</span> = <span class="ruby-constant">Dir</span>.<span class="ruby-identifier">glob</span>(<span class="ruby-node">"{#{@gem_dirs.join ','}}/doc/#{pattern}"</span>).<span class="ruby-identifier">select</span> {<span class="ruby-operator">|</span><span class="ruby-identifier">path</span><span class="ruby-operator">|</span>
<span class="ruby-constant">File</span>.<span class="ruby-identifier">exist?</span> <span class="ruby-constant">File</span>.<span class="ruby-identifier">join</span>(<span class="ruby-identifier">path</span>, <span class="ruby-value str">'rdoc/index.html'</span>)
}
<span class="ruby-keyword kw">case</span> <span class="ruby-identifier">found_gems</span>.<span class="ruby-identifier">length</span>
<span class="ruby-keyword kw">when</span> <span class="ruby-value">0</span>
<span class="ruby-keyword kw">return</span> <span class="ruby-keyword kw">false</span>
<span class="ruby-keyword kw">when</span> <span class="ruby-value">1</span>
<span class="ruby-identifier">new_path</span> = <span class="ruby-constant">File</span>.<span class="ruby-identifier">basename</span>(<span class="ruby-identifier">found_gems</span>[<span class="ruby-value">0</span>])
<span class="ruby-identifier">res</span>.<span class="ruby-identifier">status</span> = <span class="ruby-value">302</span>
<span class="ruby-identifier">res</span>[<span class="ruby-value str">'Location'</span>] = <span class="ruby-node">"/doc_root/#{new_path}/rdoc/index.html"</span>
<span class="ruby-keyword kw">return</span> <span class="ruby-keyword kw">true</span>
<span class="ruby-keyword kw">else</span>
<span class="ruby-identifier">doc_items</span> = []
<span class="ruby-identifier">found_gems</span>.<span class="ruby-identifier">each</span> <span class="ruby-keyword kw">do</span> <span class="ruby-operator">|</span><span class="ruby-identifier">file_name</span><span class="ruby-operator">|</span>
<span class="ruby-identifier">base_name</span> = <span class="ruby-constant">File</span>.<span class="ruby-identifier">basename</span>(<span class="ruby-identifier">file_name</span>)
<span class="ruby-identifier">doc_items</span> <span class="ruby-operator"><<</span> {
<span class="ruby-identifier">:name</span> =<span class="ruby-operator">></span> <span class="ruby-identifier">base_name</span>,
<span class="ruby-identifier">:url</span> =<span class="ruby-operator">></span> <span class="ruby-node">"/doc_root/#{base_name}/rdoc/index.html"</span>,
<span class="ruby-identifier">:summary</span> =<span class="ruby-operator">></span> <span class="ruby-value str">''</span>
}
<span class="ruby-keyword kw">end</span>
<span class="ruby-identifier">template</span> = <span class="ruby-constant">ERB</span>.<span class="ruby-identifier">new</span>(<span class="ruby-constant">RDOC_SEARCH_TEMPLATE</span>)
<span class="ruby-identifier">res</span>[<span class="ruby-value str">'content-type'</span>] = <span class="ruby-value str">'text/html'</span>
<span class="ruby-identifier">result</span> = <span class="ruby-identifier">template</span>.<span class="ruby-identifier">result</span> <span class="ruby-identifier">binding</span>
<span class="ruby-identifier">res</span>.<span class="ruby-identifier">body</span> = <span class="ruby-identifier">result</span>
<span class="ruby-keyword kw">return</span> <span class="ruby-keyword kw">true</span>
<span class="ruby-keyword kw">end</span>
<span class="ruby-keyword kw">end</span></pre>
</body>
</html>
|
/**
 * Bind the user mini profile card ("ucard") tooltip.
 *
 * Attaches a qtip tooltip to every element carrying a `ucard` attribute
 * (which holds the target user's uid). When the tooltip is shown, the
 * profile is fetched asynchronously and rendered into the card.
 */
function ucard() {
    $('[ucard]').qtip({ // Grab some elements to apply the tooltip to
        suppress: true,
        content: {
            text: function (event, api) {
                var uid = $(this).attr('ucard');
                $.get(U('Ucenter/Public/getProfile'), {uid: uid}, function (userProfile) {
                    // Chat/follow buttons are shown only when viewing someone
                    // else's card while logged in (MID is the current member
                    // id; 0 presumably means "not logged in" -- confirm).
                    var follow = '';
                    if ((MID != uid) && (MID != 0)) {
                        follow = '<button type="button" class="btn btn-default" onclick="talker.start_talk(' + userProfile.uid + ')" style="float: right;margin: 5px 0;padding: 2px 12px;margin-left: 8px;">聊 天</button>';
                        if (userProfile.followed == 1) {
                            follow += '<button type="button" class="btn btn-default" data-role="unfollow" data-follow-who="'+userProfile.uid+'" style="float: right;margin: 5px 0;padding: 2px 12px;"><font title="取消关注">已关注</font></button>';
                        } else {
                            follow += '<button type="button" class="btn btn-primary" data-role="follow" data-follow-who="'+userProfile.uid+'" style="float: right;margin: 5px 0;padding: 2px 12px;">关 注</button>';
                        }
                    }
                    // Card template; {$userProfile.key} placeholders are
                    // substituted with the fetched profile values below.
                    var html = '<div class="row" style="width: 350px;width: 350px;font-size: 13px;line-height: 23px;">' +
                        '<div class="col-xs-12" style="padding: 2px;">' +
                        '<img class="img-responsive" src="' + window.Think.ROOT + '/Public/images/qtip_bg.png">' +
                        '</div>' +
                        '<div class="col-xs-12" style="padding: 2px;margin-top: -25px;">' +
                        '<div class="col-xs-3">' +
                        '<img src="{$userProfile.avatar64}" class="avatar-img img-responsive" style=""/>' +
                        '</div>' +
                        '<div class="col-xs-9" style="padding-top: 25px;padding-right:0px;font-size: 12px;">' +
                        '<div style="font-size: 16px;font-weight: bold;"><a href="{$userProfile.space_url}" title="">{$userProfile.nickname}</a>{$userProfile.rank_link}' +
                        '</div>' +
                        '<div>' +
                        '<a href="{$userProfile.following_url}" title="我的关注" target="_black">关注:{$userProfile.following}</a> ' +
                        '<a href="{$userProfile.fans_url}" title="我的关注" target="_black">粉丝:{$userProfile.fans}</a> ' +
                        '</div>' +
                        '<div style="margin-bottom: 15px;color: #848484">' +
                        '个性签名:' +
                        '<span>' +
                        '{$userProfile.signature}' +
                        '</span>' +
                        '</div>' +
                        '</div>' +
                        '</div>' +
                        '<div class="col-xs-12" style="background: #f1f1f1;">' +
                        follow +
                        '</div>' +
                        '</div>';
                    // Fall back to a default signature when the user has none.
                    userProfile.signature = userProfile.signature === '' ? '还没想好O(∩_∩)O' : userProfile.signature;
                    // NOTE(review): String.replace substitutes only the first
                    // occurrence of each placeholder -- fine while each key
                    // appears once in the template; confirm if that changes.
                    for (var key in userProfile) {
                        html = html.replace('{$userProfile.' + key + '}', userProfile[key]);
                    }
                    //alert(html);
                    // Swap the loading text for the rendered card and rebind
                    // the follow/unfollow button handlers.
                    var tpl = $(html);
                    api.set('content.text', tpl.html());
                    follower.bind_follow();
                }, 'json');
                // Placeholder shown while the profile is being fetched.
                return '获取数据中...'
            }
        }, position: {
            viewport: $(window)
        }, show: {
            solo: true,
            delay: 500
        }, style: {
            classes: 'qtip-bootstrap'
        }, hide: {
            delay: 500, fixed: true
        }
    })
}
|
const config = require('../../../config.json');
const request = require('request');
module.exports = {
setGame: function(client){
client.user.setGame(`${config.prefix}help | ${client.guilds.size} servers`);
},
discordpw: function(client) {
request.post({
url: 'https://bots.discord.pw/api/bots/'+client.user.id+'/stats',
json: true,
headers: {
Authorization: config.tokens.discordpw
},
body: {
"server_count": client.guilds.size
}
}, function (error, response, body) {
});
},
discordlist: function(client) {
request.post({
url: 'https://bots.discordlist.net/api',
json: true,
body: {
"token": config.tokens.discordlist,
"servers": client.guilds.size
}
}, function (error, response, body) {
console.log(body)
});
}
};
|
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("03.GroupNumbers")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("03.GroupNumbers")]
[assembly: AssemblyCopyright("Copyright © 2017")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("06126967-90f1-4b63-819c-ebb91edb557a")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Rgaa 4.0 Test.4.11.2 Passed 01</title>
</head>
<body class="Passed">
<div>
<h1>Rgaa 4.0 Test.4.11.2 Passed 01</h1>
<!-- START [test-detail] -->
<div class="test-detail" lang="fr">
<p>Pour chaque <a href="https://www.numerique.gouv.fr/publications/rgaa-accessibilite/methode/glossaire/#media-temporel-type-son-video-et-synchronise">média temporel</a>, chaque fonctionnalité vérifie-t-elle une de ces conditions ?</p>
<ul>
<li>La fonctionnalité est <a href="https://www.numerique.gouv.fr/publications/rgaa-accessibilite/methode/glossaire/#accessible-et-activable-par-le-clavier-et-tout-dispositif-de-pointage">accessible par le clavier et tout dispositif de pointage</a>.</li>
<li>Une fonctionnalité <a href="https://www.numerique.gouv.fr/publications/rgaa-accessibilite/methode/glossaire/#accessible-et-activable-par-le-clavier-et-tout-dispositif-de-pointage">accessible par le clavier et tout dispositif de pointage</a> permettant de réaliser la même action est présente dans la page.</li>
</ul>
</div>
<!-- END [test-detail] -->
<!-- START [testcase] -->
<div class="testcase">
</div>
<!-- END [testcase] -->
<!-- START [test-explanation] -->
<div class="test-explanation">
Passed.
</div>
<!-- END [test-explanation] -->
</div>
</body>
</html>
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 NEC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from tempest.common.rest_client import RestClient
class AvailabilityZoneClientJSON(RestClient):
    """JSON client for Nova's os-availability-zone API extension."""

    def __init__(self, config, username, password, auth_url, tenant_name=None):
        super(AvailabilityZoneClientJSON, self).__init__(
            config, username, password, auth_url, tenant_name)
        self.service = self.config.compute.catalog_type

    def _list_zones(self, url):
        # Shared helper: GET the endpoint and unpack the zone list.
        resp, body = self.get(url)
        return resp, json.loads(body)['availabilityZoneInfo']

    def get_availability_zone_list(self):
        """Return the summary list of availability zones."""
        return self._list_zones('os-availability-zone')

    def get_availability_zone_list_detail(self):
        """Return the detailed list of availability zones."""
        return self._list_zones('os-availability-zone/detail')
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Base functionality for handling HTTP requests for built-in pages."""
import re
from google.appengine.tools.devappserver2 import url_handler
class WSGIHandler(url_handler.URLHandler):
  """Runs a WSGI application for URLs matching a regular expression.

  This handler performs no authorization; the authorization check always
  succeeds.
  """

  def __init__(self, wsgi_app, url_pattern):
    """Compiles url_pattern and stores the WSGI application.

    Args:
      wsgi_app: A WSGI application function as defined in PEP-333.
      url_pattern: Regular expression string for the URLs served by this
        handler. Unlike user-provided patterns in app.yaml, it only needs
        to match the start of the URL; end it with '$' to require a full
        match.

    Raises:
      re.error: url_pattern was not a valid regular expression.
    """
    super(WSGIHandler, self).__init__(re.compile(url_pattern))
    self._wsgi_app = wsgi_app

  def handle(self, unused_match, environ, start_response):
    """Delegates the request to the wrapped WSGI application.

    Args:
      unused_match: Unused.
      environ: An environ dict for the current request as defined in PEP-333.
      start_response: A function with semantics defined in PEP-333.

    Returns:
      An iterable over strings containing the body of the HTTP response.
    """
    return self._wsgi_app(environ, start_response)
|
#import <UIKit/UIKit.h>
// Table view controller whose rows are backed by an array of model objects.
@interface ESArrayBackedTableViewController : UITableViewController
// The model objects driving the table; presumably one row per element --
// TODO(review): confirm against the implementation.
@property(strong) NSArray* cellData;
// Key used to derive each cell's main text from a model object (likely via
// key-value coding -- confirm).
@property(strong) NSString* textKey;
// Key used to derive each cell's detail text (likely via KVC -- confirm).
@property(strong) NSString* detailKey;
// UITableViewCellStyle applied to cells created by this controller.
@property(assign) UITableViewCellStyle cellStyle;
// Read-only flag; presumably YES once the table has been converted to an
// indexed/sectioned layout -- confirm.
@property(readonly) BOOL usesSections;
// Converts the flat array into an indexed table; the block supplies the
// index title for each element.
-(void)convertToIndex:(id(^)())indexTitle;
// Convenience wrapper: index the table alphabetically.
-(void)convertToAlphaIndex;
// Applies textKey/detailKey-based configuration for one model object.
-(void)configureCellForData:(id)o;
// Override these:
-(NSArray*)cellData;
-(void)configureCell:(UITableViewCell*)c withData:(id)o;
-(void)didSelectCellWithData:(id)o;
-(NSIndexPath*)indexPathForObject:(id)data;
-(void)scrollToObject:(id)data animated:(BOOL)animated;
@end
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import yaml
from ansible.compat.six import PY3
from ansible.parsing.yaml.objects import AnsibleUnicode, AnsibleSequence, AnsibleMapping
from ansible.parsing.yaml.objects import AnsibleVaultEncryptedUnicode
from ansible.vars.hostvars import HostVars
from ansible.vars.unsafe_proxy import AnsibleUnsafeText
class AnsibleDumper(yaml.SafeDumper):
    '''
    A stub SafeDumper subclass serving purely as a registration target for
    the representers of Ansible's overridden object types.
    '''
def represent_hostvars(self, data):
    """Represent a HostVars object by materialising it into a plain dict."""
    plain = dict(data)
    return self.represent_dict(plain)
# Note: only the encrypted payload is represented, never the plaintext.
def represent_vault_encrypted_unicode(self, data):
    """Dump vault-encrypted text as a literal-block '!vault' scalar."""
    ciphertext = data._ciphertext.decode()
    return self.represent_scalar(u'!vault', ciphertext, style='|')
# On py3 every string is str; on py2 Ansible's text types derive from
# unicode, so pick the matching SafeRepresenter method.
if PY3:
    represent_unicode = yaml.representer.SafeRepresenter.represent_str
else:
    represent_unicode = yaml.representer.SafeRepresenter.represent_unicode

# Teach AnsibleDumper to serialise each of Ansible's custom types exactly
# like the plain type it wraps.
AnsibleDumper.add_representer(AnsibleUnicode, represent_unicode)
AnsibleDumper.add_representer(AnsibleUnsafeText, represent_unicode)
AnsibleDumper.add_representer(HostVars, represent_hostvars)
AnsibleDumper.add_representer(AnsibleSequence,
                              yaml.representer.SafeRepresenter.represent_list)
AnsibleDumper.add_representer(AnsibleMapping,
                              yaml.representer.SafeRepresenter.represent_dict)
AnsibleDumper.add_representer(AnsibleVaultEncryptedUnicode,
                              represent_vault_encrypted_unicode)
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace JCTest
{
    /// <summary>
    /// Placeholder console application; the entry point currently does
    /// nothing.
    /// </summary>
    class Program
    {
        /// <summary>Application entry point (intentionally empty).</summary>
        static void Main(string[] args)
        {
        }
    }
}
|
<?php
/* core/modules/views/templates/views-mini-pager.html.twig */
/**
 * Compiled form of core/modules/views/templates/views-mini-pager.html.twig.
 *
 * NOTE: generated by the Twig compiler -- do not edit by hand; rebuild the
 * Twig template cache instead. The "// line N" markers map back to the
 * source template (see getDebugInfo()).
 */
class __TwigTemplate_54bf9b90f1252d512b65eef41a09e62d6d8e8e3fcf6ce0cd0956e6e77d068c16 extends Twig_Template
{
    public function __construct(Twig_Environment $env)
    {
        parent::__construct($env);
        // This template extends no parent template and defines no blocks.
        $this->parent = false;
        $this->blocks = array(
        );
    }
    /**
     * Renders the mini pager: a visually hidden "Pages" heading followed by
     * the escaped pager items, emitted only when items are present.
     */
    protected function doDisplay(array $context, array $blocks = array())
    {
        // line 14
        if ((isset($context["items"]) ? $context["items"] : null)) {
            // line 15
            echo " <h2 class=\"visually-hidden\">";
            echo twig_render_var(t("Pages"));
            echo "</h2>
";
            // line 16
            echo twig_drupal_escape_filter($this->env, (isset($context["items"]) ? $context["items"] : null), "html", null, true);
            echo "
";
        }
    }
    public function getTemplateName()
    {
        return "core/modules/views/templates/views-mini-pager.html.twig";
    }
    public function isTraitable()
    {
        return false;
    }
    /**
     * Maps compiled-code line numbers to template line numbers.
     */
    public function getDebugInfo()
    {
        return array ( 26 => 16, 21 => 15, 19 => 14,);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.