text
stringlengths 2
6.14k
|
|---|
# -*- coding: utf-8 -*-
import types
import wx
from classes.ui import UIManager
from classes.ui import UIControllerObject
from classes.ui import UIViewObject
from app import log
class MenuItemController(UIControllerObject):
    """Controller for a single menu item.

    On PostInit it registers itself with the parent menu controller; on
    PreRemove it unregisters itself again.
    """
    tid = 'menu_item_controller'

    _ATTRIBUTES = {
        'pos': {'default_value': -1, 'type': int},
        'id': {'default_value': wx.ID_ANY, 'type': int},
        'label': {'default_value': wx.EmptyString, 'type': str},
        'help': {'default_value': wx.EmptyString, 'type': str},
        'kind': {'default_value': wx.ITEM_NORMAL, 'type': int},
        'callback': {'default_value': None, 'type': types.FunctionType},
    }

    def __init__(self, **state):
        super().__init__(**state)

    def PostInit(self):
        # Attach this item to the menu controller that owns it.
        ui_manager = UIManager()
        owner = ui_manager.get(ui_manager._getparentuid(self.uid))
        owner.insert_menu_item(self)

    def PreRemove(self):
        # Detach this item from its owning menu before it is removed.
        ui_manager = UIManager()
        owner = ui_manager.get(ui_manager._getparentuid(self.uid))
        owner.remove_menu_item(self)
class MenuItemView(UIViewObject, wx.MenuItem):
    """View side of a menu item: a wx.MenuItem driven by a menu controller."""
    tid = 'menu_item_view'

    def __init__(self, controller_uid):
        # Initialize the framework side first so self._controller_uid is set.
        UIViewObject.__init__(self, controller_uid)
        _UIM = UIManager()
        controller = _UIM.get(self._controller_uid)
        # wx.ID_ANY means no explicit id was supplied: allocate a fresh,
        # stable id so the item can later be referenced/bound by id.
        if controller.id == wx.ID_ANY:
            controller.id = _UIM.new_wx_id()
        try:
            # Parent menu is None here; the item is inserted into its parent
            # during the controller's PostInit phase.
            wx.MenuItem.__init__(self, None, controller.id, controller.label,
                                 controller.help, controller.kind
                                 )
        except Exception as e:
            print(e)
            raise

    def PostInit(self):
        """Derive/clamp the insertion position within the parent menu."""
        log.debug('{}.PostInit started'.format(self.name))
        _UIM = UIManager()
        controller = _UIM.get(self._controller_uid)
        parent_controller_uid = _UIM._getparentuid(self._controller_uid)
        parent_controller = _UIM.get(parent_controller_uid)
        if controller.pos == -1:
            # Appending - Not needed to declare pos
            controller.pos = parent_controller.view.GetMenuItemCount()
        if controller.pos > parent_controller.view.GetMenuItemCount():
            # If pos was setted out of range for inserting in parent Menu
            msg = 'Invalid menu position for MenuItem with text={}. Position will be setting to {}'.format(
                controller.label, parent_controller.view.GetMenuItemCount())
            log.warning(msg)
            controller.pos = parent_controller.view.GetMenuItemCount()
        log.debug('{}.PostInit ended'.format(self.name))
|
// I18N constants
// LANG: "ru", ENCODING: UTF-8N
{
"Click a color..." : "Выберите цвет ...",
"Close" : "Закрыть",
"Color: " : "Цвет: ",
"Sample" : "Образец",
"Web Safe:" : "Web Safe:"
|
/*
* This file is part of UltimateCore, licensed under the MIT License (MIT).
*
* Copyright (c) Bammerbom
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package bammerbom.ultimatecore.sponge.modules.spy.commands;
import bammerbom.ultimatecore.sponge.UltimateCore;
import bammerbom.ultimatecore.sponge.api.command.HighCommand;
import bammerbom.ultimatecore.sponge.api.command.annotations.CommandInfo;
import bammerbom.ultimatecore.sponge.api.command.argument.Arguments;
import bammerbom.ultimatecore.sponge.api.command.argument.arguments.PlayerArgument;
import bammerbom.ultimatecore.sponge.api.language.utils.Messages;
import bammerbom.ultimatecore.sponge.api.permission.Permission;
import bammerbom.ultimatecore.sponge.api.user.UltimateUser;
import bammerbom.ultimatecore.sponge.api.variable.utils.VariableUtil;
import bammerbom.ultimatecore.sponge.modules.spy.SpyModule;
import bammerbom.ultimatecore.sponge.modules.spy.api.SpyKeys;
import bammerbom.ultimatecore.sponge.modules.spy.api.SpyPermissions;
import org.spongepowered.api.command.CommandException;
import org.spongepowered.api.command.CommandResult;
import org.spongepowered.api.command.CommandSource;
import org.spongepowered.api.command.args.CommandContext;
import org.spongepowered.api.command.args.CommandElement;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.text.Text;
import java.util.Arrays;
import java.util.List;
@CommandInfo(module = SpyModule.class, aliases = {"messagespy", "msgspy", "spymessage", "spymsg", "whisperspy", "spywhisper"})
public class MessagespyCommand implements HighCommand {

    /** Base permission required to toggle message spying. */
    @Override
    public Permission getPermission() {
        return SpyPermissions.UC_SPY_MESSAGESPY_BASE;
    }

    @Override
    public List<Permission> getPermissions() {
        return Arrays.asList(SpyPermissions.UC_SPY_MESSAGESPY_BASE, SpyPermissions.UC_SPY_MESSAGESPY_OTHERS);
    }

    /** Optional single player argument; omitted means "toggle for self". */
    @Override
    public CommandElement[] getArguments() {
        return new CommandElement[]{
                Arguments.builder(new PlayerArgument(Text.of("player"))).onlyOne().optional().build()
        };
    }

    @Override
    public CommandResult execute(CommandSource sender, CommandContext args) throws CommandException {
        checkPermission(sender, SpyPermissions.UC_SPY_MESSAGESPY_BASE);
        if (!args.hasAny("player")) {
            //Toggle own
            checkIfPlayer(sender);
            Player p = (Player) sender;
            UltimateUser user = UltimateCore.get().getUserService().getUser(p);
            boolean status = user.get(SpyKeys.MESSAGESPY_ENABLED).get();
            status = !status;
            user.offer(SpyKeys.MESSAGESPY_ENABLED, status);
            // Fix: use getFormatted for the disabled message too, matching the
            // enabled branch (plain get() skips variable formatting).
            Messages.send(sender, "spy.command.messagespy.self", "%status%", status ? Messages.getFormatted("spy.enabled") : Messages.getFormatted("spy.disabled"));
            return CommandResult.success();
        } else {
            //Toggle someone else
            checkPermission(sender, SpyPermissions.UC_SPY_MESSAGESPY_OTHERS);
            Player t = args.<Player>getOne("player").get();
            UltimateUser user = UltimateCore.get().getUserService().getUser(t);
            boolean status = user.get(SpyKeys.MESSAGESPY_ENABLED).get();
            status = !status;
            user.offer(SpyKeys.MESSAGESPY_ENABLED, status);
            Messages.send(t, "spy.command.messagespy.self", "%status%", status ? Messages.getFormatted("spy.enabled") : Messages.getFormatted("spy.disabled"));
            Messages.send(sender, "spy.command.messagespy.others", "%status%", status ? Messages.getFormatted("spy.enabled") : Messages.getFormatted("spy.disabled"), "%player%", VariableUtil.getNameEntity(t));
            return CommandResult.success();
        }
    }
}
|
from datetime import datetime
from slugify import slugify
from sqlalchemy import event
from sqlalchemy.orm import Session
from app import db
from app.utils.database import CRUDMixin
tags_posts = db.Table('tags_posts',
db.Column('post_id', db.Integer, db.ForeignKey('post.id', ondelete='cascade')),
db.Column('tag_id', db.Integer, db.ForeignKey('tag.id', ondelete='cascade')))
class Post(db.Model, CRUDMixin):
    """A blog post with optional short/long text and many-to-many tags."""
    # Fields indexed by the full-text search integration.
    __searchable__ = ['title', 'short_text', 'long_text']

    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(120), nullable=False)
    short_text = db.Column(db.String(1000))
    long_text = db.Column(db.String(10000))
    timestamp = db.Column(db.DateTime)
    slug = db.Column(db.String(160))
    tags = db.relationship('Tag', secondary=tags_posts, lazy='dynamic',
                           backref=db.backref('posts', lazy='dynamic'))
    # NOTE(review): despite the name, this FK references user.id — it looks
    # like the post author's id; confirm intent before renaming the column.
    post_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    # At least one of short_text / long_text must be present.
    __table_args__ = (db.CheckConstraint('NOT(short_text IS NULL AND long_text IS NULL)'),)

    def __init__(self, **kwargs):
        # Timestamp is always set server-side at creation time (UTC).
        super().__init__(**kwargs)
        self.timestamp = datetime.utcnow()

    def save(self):
        """
        Creates the slug and saves the post.
        Returns:
            The Post object
        """
        self.slug = slugify(self.title)
        return super().save()
class Tag(db.Model, CRUDMixin):
    """A post tag; orphaned tags are purged after each flush (see below)."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(60), nullable=False)
    slug = db.Column(db.String(80), nullable=False)
@event.listens_for(Session, 'after_flush')
def delete_tag_orphans(session, ctx):
    """ Deletes all Tag objects with no posts. """
    # Bulk delete; synchronize_session=False skips reconciling in-memory
    # objects, which is acceptable immediately after a flush.
    session.query(Tag).filter(~Tag.posts.any()).delete(synchronize_session=False)
def before_tag_insert_listener(mapper, connection, target):
    """ Creates the slug for a Tag object. """
    target.slug = slugify(target.name)


# Generate the slug automatically whenever a Tag row is first inserted.
event.listen(Tag, 'before_insert', before_tag_insert_listener)
|
#! python
# -*- coding: utf-8 -*-
# (c) 2011 Adrian Przekwas LGPL
from __future__ import division # allows floating point division from integers
import FreeCAD, Part
from FreeCAD import Base
class MySpring:
    """Parametric FreeCAD feature: a helical spring swept from a circle."""

    def __init__(self, obj):
        ''' Add the properties: Pitch, Diameter, Height, BarDiameter '''
        obj.addProperty("App::PropertyLength","Pitch","MySpring","Pitch of the helix").Pitch=5.0
        obj.addProperty("App::PropertyLength","Diameter","MySpring","Diameter of the helix").Diameter=6.0
        obj.addProperty("App::PropertyLength","Height","MySpring","Height of the helix").Height=30.0
        obj.addProperty("App::PropertyLength","BarDiameter","MySpring","Diameter of the bar").BarDiameter=3.0
        obj.Proxy = self

    def onChanged(self, fp, prop):
        # Rebuild the shape whenever any driving parameter changes.
        if prop == "Pitch" or prop == "Diameter" or prop == "Height" or prop == "BarDiameter":
            self.execute(fp)

    def execute(self, fp):
        # Recompute the spring solid from the current property values.
        pitch = fp.Pitch
        radius = fp.Diameter/2
        height = fp.Height
        barradius = fp.BarDiameter/2
        myhelix=Part.makeHelix(pitch,height,radius)
        g=myhelix.Edges[0].Curve
        c=Part.Circle()
        c.Center=g.value(0) # start point of the helix
        c.Axis=(0,1,0)
        c.Radius=barradius
        p=c.toShape()
        section = Part.Wire([p])
        makeSolid=1 # 1 -> sweep produces a solid, 0 -> only a shell
        isFrenet=1  # Frenet frame keeps the section oriented along the helix
        myspring=Part.Wire(myhelix).makePipeShell([section],makeSolid,isFrenet)
        fp.Shape = myspring
def makeMySpring():
    """Create a parametric spring feature in the active (or a new) document."""
    doc = FreeCAD.activeDocument() or FreeCAD.newDocument()
    feature = doc.addObject("Part::FeaturePython", "My_Spring")
    feature.Label = "My Spring"
    MySpring(feature)
    # Default view provider; 0 keeps the stock representation.
    feature.ViewObject.Proxy = 0
    doc.recompute()


if __name__ == "__main__":
    makeMySpring()
|
import $ from 'jquery';
import ModalFormView from 'components/common/modal-form';
import './templates';
export default ModalFormView.extend({
  template: Templates['quality-profiles-rename-profile'],

  onFormSubmit: function () {
    ModalFormView.prototype.onFormSubmit.apply(this, arguments);
    this.sendRequest();
  },

  /**
   * POSTs the new profile name; on success updates the model and closes
   * the modal, on failure shows the server-provided errors.
   */
  sendRequest: function () {
    var that = this,
        url = baseUrl + '/api/qualityprofiles/rename',
        name = this.$('#rename-profile-name').val(),
        options = {
          key: this.model.get('key'),
          name: name
        };
    return $.ajax({
      type: 'POST',
      url: url,
      data: options,
      statusCode: {
        // do not show global error
        400: null
      }
    }).done(function () {
      that.model.set({ name: name });
      that.destroy();
    }).fail(function (jqXHR) {
      // Fix: network failures and non-JSON payloads have no responseJSON;
      // guard so the error handler itself cannot throw.
      var response = jqXHR.responseJSON || {};
      that.showErrors(response.errors || [], response.warnings);
    });
  }
});
|
import React, { PropTypes } from 'react'
const Todo = ({ onClick, completed, text }) => (
<li
onClick={onClick}
style={{textDecoration: completed ? 'line-through': 'none'}}
>
{text}
</li>
)
Todo.propTypes = {
text: PropTypes.string.isRequired
}
export default Todo
|
import {AudioWrapper} from './util/audio-wrapper';
import {ChartComponent} from './components/chart/chart.component';
import {TestBed, async} from '@angular/core/testing';
import {AppComponent} from './app.component';
import {FileSelectorComponent} from './components/file-selector/file-selector.component';
import {MdCardModule, MdDialogModule, MdDialogRef, MdInputModule} from '@angular/material';
// Minimal stub standing in for MdDialogRef in the testing module.
class MdDialogRefMock {
}
describe('AppComponent', () => {
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [
        AppComponent,
        ChartComponent,
        FileSelectorComponent
      ],
      imports: [
        MdInputModule,
        MdCardModule,
        MdDialogModule
      ],
      providers: [
        AudioWrapper,
        // Fix: `use` is not a valid provider key, so the mock was never
        // substituted; `useClass` makes Angular instantiate the mock.
        {provide: MdDialogRef, useClass: MdDialogRefMock}
      ]
    }).compileComponents();
  }));

  it('should create the app', async(() => {
    const fixture = TestBed.createComponent(AppComponent);
    const app = fixture.debugElement.componentInstance;
    expect(app).toBeTruthy();
  }));
});
|
package org.kantega.playground.fresh.srv;
import fj.Ord;
import fj.data.TreeMap;
import static fj.P.p;
/**
 * Immutable HTTP-style response: status, body and an immutable header map.
 */
public class Response {
    public final Status status;
    public final String body;
    public final TreeMap<String, String> headers;

    public Response(Status status, String body, TreeMap<String, String> headers) {
        this.status = status;
        this.body = body;
        this.headers = headers;
    }

    /** Returns a copy of this response with the given header added/replaced. */
    public Response withHeader(String key, String value) {
        return new Response(status, body, headers.set(key, value));
    }

    /** Creates a 200 OK text/plain response. */
    public static Response string(String body) {
        return new Response(Status.OK, body, TreeMap.treeMap(Ord.stringOrd, p("Content-Type", "text/plain")));
    }

    /** Creates a 200 OK application/json response. */
    public static Response json(String body) {
        return new Response(Status.OK, body, TreeMap.treeMap(Ord.stringOrd, p("Content-Type", "application/json")));
    }

    public enum StatusFamily {success, redirect, clienterror, servererror}

    public enum Status {
        OK(StatusFamily.success, 200),
        NoContent(StatusFamily.success, 204),
        // Fix: Bad Request is HTTP 400 (was 201, a success code).
        BadRequest(StatusFamily.clienterror, 400),
        // Fix: Forbidden is HTTP 403 (401 is Unauthorized).
        Forbidden(StatusFamily.clienterror, 403),
        NotFound(StatusFamily.clienterror, 404),
        ServerError(StatusFamily.servererror, 500);
        //todo etc...

        public final StatusFamily family;
        public final int code;

        Status(StatusFamily family, int code) {
            this.code = code;
            this.family = family;
        }
    }
}
|
import json
from datetime import datetime
import requests
from discord.errors import HTTPException
from sazabi.model import Channel
from sazabi.types import SazabiBotPlugin
from sazabi.util import create_session
from requests.exceptions import ConnectionError
class Twitch(SazabiBotPlugin):
    """Polls the Twitch Helix API for tracked channels and announces streams
    that transition from offline to live in every '#general' channel."""

    async def parse(self, client, message, *args, **kwargs):
        """Refresh the live status of every tracked channel.

        kwargs must contain 'client_id' (the Twitch API client id). A
        go-live notification is only sent on the offline -> online edge.
        """
        session = create_session()  # type: sqlalchemy.orm.session.Session
        client_id = kwargs.get('client_id')
        headers = {'Client-ID': client_id}
        for channel in session.query(Channel).all():
            user_login = channel.channel_name
            try:
                # params= lets requests URL-encode the login safely instead of
                # raw string concatenation.
                response = requests.get('https://api.twitch.tv/helix/streams',
                                        params={'user_login': user_login},
                                        headers=headers)
                if response.status_code == 200:
                    result = json.loads(response.text)
                    streams = result.get('data')
                    # Re-fetch the row to update its status.
                    channel = session.query(Channel).filter(
                        Channel.channel_name == user_login).first()
                    notify = False  # send a message update when going online
                    if len(streams) > 0:
                        if not channel.live:
                            # became live
                            notify = True
                            self.logger.debug('Stream {} is live'.format(user_login))
                        channel.live = True
                    else:
                        channel.live = False
                        self.logger.debug('Stream {} is offline'.format(user_login))
                    channel.last_updated = datetime.now()
                    if notify:
                        channel.last_change = datetime.now()
                        self.logger.info(
                            'Send update, {} went live'.format(channel.channel_name))
                        await self.send_update(client, channel.channel_name)
                    else:
                        self.logger.info('No stream changes')
                    session.commit()
                else:
                    self.logger.error(
                        "Could not connect to twitch: {}, {}".format(response.status_code,
                                                                     response.text))
            except ConnectionError:
                self.logger.error("Could not connect to twitch")
        # close the session
        session.close()

    async def send_update(self, client, stream_name):
        """Announce *stream_name* going live in every channel named 'general'."""
        channels = [c for c in client.get_all_channels() if
                    'general' in c.name.lower()]
        for c in channels:
            # Fix: log the actual channel name; the original logged the
            # literal string "c.name".
            self.logger.info("Sending update to channel #{}".format(c.name))
            message = 'Stream {} went online! https://twitch.tv/{}'.format(
                stream_name, stream_name)
            await self.send_message_wrapper(client, c, message)

    async def send_message_wrapper(self, client, channel, message):
        """Send *message* to *channel*, logging HTTP failures instead of raising."""
        self.logger.info("Sending message: {}".format(message))
        try:
            await client.send_message(channel, message)
        except HTTPException as e:
            self.logger.error(
                "Unable to send to channel #{}: {}".format(channel.name, e.text))
|
from rpython.rtyper.lltypesystem import lltype
from rpython.rtyper.rmodel import inputconst
from rpython.tool.ansi_print import AnsiLogger
from rpython.translator.simplify import get_graph
log = AnsiLogger("backendopt")
def graph_operations(graph):
    """Yield every operation of every block in *graph*, in block order."""
    for blk in graph.iterblocks():
        for operation in blk.operations:
            yield operation
def all_operations(graphs):
    """Yield every operation of every block across all *graphs*."""
    for g in graphs:
        for blk in g.iterblocks():
            for operation in blk.operations:
                yield operation
def annotate(translator, func, result, args):
    """Annotate helper *func* and return an (inputconst) Constant holding a
    typed function pointer to it.

    Argument/result low-level types are taken from the .concretetype of the
    given variables.
    NOTE: uses func.func_name — Python 2 attribute (py3 spells it __name__).
    """
    args = [arg.concretetype for arg in args]
    graph = translator.rtyper.annotate_helper(func, args)
    fptr = lltype.functionptr(lltype.FuncType(args, result.concretetype), func.func_name, graph=graph)
    c = inputconst(lltype.typeOf(fptr), fptr)
    return c
def var_needsgc(var):
    """True if *var* holds a pointer whose target is GC-managed."""
    lowlevel_type = var.concretetype
    if not isinstance(lowlevel_type, lltype.Ptr):
        return False
    return lowlevel_type._needsgc()
def find_calls_from(translator, graph, memo=None):
    """Return the (block, called_graph) pairs of *graph*.

    An optional *memo* dict caches results keyed by graph.
    """
    if memo and graph in memo:
        return memo[graph]
    result = list(_find_calls_from(translator, graph))
    if memo is not None:
        memo[graph] = result
    return result
def _find_calls_from(translator, graph):
    """Yield (block, called_graph) for each direct/indirect call in *graph*."""
    for blk in graph.iterblocks():
        for operation in blk.operations:
            if operation.opname == "direct_call":
                target = get_graph(operation.args[0], translator)
                if target is not None:
                    yield blk, target
            elif operation.opname == "indirect_call":
                # The last argument of an indirect_call holds the candidate
                # graphs (or None when unknown).
                candidates = operation.args[-1].value
                if candidates is not None:
                    for target in candidates:
                        yield blk, target
def find_backedges(graph, block=None, seen=None, seeing=None):
    """finds the backedges in the flow graph

    A link is a backedge when its target is an ancestor still on the current
    DFS path (tracked by *seeing*).
    """
    result = []
    if block is None:
        block = graph.startblock
    if seen is None:
        seen = {block}
    if seeing is None:
        seeing = set()
    seeing.add(block)
    for link in block.exits:
        target = link.target
        if target not in seen:
            seen.add(target)
            result.extend(find_backedges(graph, target, seen, seeing))
        elif target in seeing:
            result.append(link)
    seeing.remove(block)
    return result
def compute_reachability(graph):
    """Map each block of *graph* to the set of blocks reachable from it.

    A block is in its own set only if it lies on a cycle.
    """
    reachable = {}
    blocks = list(graph.iterblocks())
    # Reversed order should make the reuse path more likely.
    for blk in reversed(blocks):
        reach = set()
        todo = [blk]
        while todo:
            current = todo.pop()
            for link in current.exits:
                target = link.target
                if target in reachable:
                    # Reuse the closure already computed for this target.
                    reach.add(target)
                    reach = reach | reachable[target]
                elif target not in reach:
                    reach.add(target)
                    todo.append(target)
        reachable[blk] = reach
    return reachable
def find_loop_blocks(graph):
    """find the blocks in a graph that are part of a loop"""
    # Maps each block belonging to some loop to that loop's header (the
    # target of a backedge). A block on several loops keeps the header of
    # the last backedge processed.
    loop = {}
    reachable = compute_reachability(graph)
    for backedge in find_backedges(graph):
        start = backedge.target        # loop header
        end = backedge.prevblock       # source block of the backedge
        loop[start] = start
        loop[end] = start
        # Walk forward from the header; any visited block that can still
        # reach the backedge source lies inside this loop.
        scheduled = [start]
        seen = {}
        while scheduled:
            current = scheduled.pop()
            connects = end in reachable[current]
            seen[current] = True
            if connects:
                loop[current] = start
            for link in current.exits:
                if link.target not in seen:
                    scheduled.append(link.target)
    return loop
def md5digest(translator):
    """Return {graph name: md5 digest} fingerprinting each graph's operations.

    Two graphs with identical opnames, results and arguments (as rendered by
    str()) get the same digest.
    NOTE: Python 2 semantics — md5.update() is fed str objects directly;
    under Python 3 these would need to be encoded to bytes first.
    """
    from hashlib import md5
    graph2digest = {}
    for graph in translator.graphs:
        m = md5()
        for op in graph_operations(graph):
            m.update(op.opname + str(op.result))
            for a in op.args:
                m.update(str(a))
        graph2digest[graph.name] = m.digest()
    return graph2digest
|
#!/usr/bin/env python
# Copyright (C) 2014 SEE AUTHORS FILE
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Build script.
"""
import os.path
try:
from setuptools import setup, find_packages
except ImportError:
import warnings
warnings.warn('No setuptools. Script creation will be skipped.')
from distutils.core import setup
def parse_requirements(path):
    """Return the requirement lines from the file at *path* as a list."""
    with open(path, 'r') as reqs_file:
        return reqs_file.read().splitlines()
# Install requirements come straight from requirements.txt so pip and
# setup.py stay in sync.
reqs = parse_requirements('requirements.txt')

setup(
    name='rerest',
    version='0.0.12-6',
    description="REST entrypoint to the core component of the Inception Release Engine",
    author='See AUTHORS',
    author_email='[email protected]',
    url='https://github.com/RHInception/re-rest',
    license='AGPLv3',
    zip_safe=False,
    # Sources live under src/rerest rather than a top-level package dir.
    package_dir={
        'rerest': os.path.join('src', 'rerest')
    },
    package_data={
        'rerest': ['templates/*.html', 'static/*']},
    install_requires=reqs,
    packages=find_packages('src'),
    classifiers=[
        ('License :: OSI Approved :: GNU Affero General Public '
         'License v3 or later (AGPLv3+)'),
        'Development Status :: 5 - Production/Stable',
        'Programming Language :: Python',
    ],
)
|
package com.googlecode.zohhak.api;
import static org.assertj.core.api.Assertions.assertThat;
import org.junit.runner.RunWith;
import com.googlecode.zohhak.api.Coercion;
import com.googlecode.zohhak.api.TestWith;
import com.googlecode.zohhak.api.runners.ZohhakRunner;
import com.googlecode.zohhak.helper.SampleType;
import com.googlecode.zohhak.helper.SampleType2;
import com.googlecode.zohhak.helper.SecondCoercer;
@RunWith(ZohhakRunner.class)
/**
 * Exercises Zohhak coercion extension points: coercers supplied via the
 * {@code coercers=} attribute, {@code @Coercion}-annotated methods on the
 * test class itself, and coercers declared as inner classes.
 */
public class ExtendedCoercerTest {

    // An extra coercer can introduce a brand-new parameter type.
    @TestWith(value="a", coercers=SecondCoercer.class)
    public void addedCoercerNewTypeTest(SampleType sampleType) {
        assertThat(sampleType.value).isEqualTo("a");
    }

    // Adding a coercer must not break coercion of already-supported types.
    @TestWith(value="a, b", coercers=SecondCoercer.class)
    public void addedCoercerOldTypeTest(SampleType sampleType, String string) {
        assertThat(sampleType.value).isEqualTo("a");
        assertThat(string).isEqualTo("b");
    }

    // A custom coercer may override built-in int parsing (hex literals).
    @TestWith(value="0xFF", coercers=SecondCoercer.class)
    public void coerceHex(int param) {
        assertThat(param).isEqualTo(255);
    }

    // Binary literals via the same custom coercer.
    @TestWith(value="b1101", coercers=SecondCoercer.class)
    public void assertionErrorHandling(int param) {
        assertThat(param).isEqualTo(13);
    }

    // Coercion methods annotated directly on the test class are picked up.
    @TestWith("a")
    public void annotatedAssertionTest(SampleType2 sampleType) {
        assertThat(sampleType.value).isEqualTo("a");
    }

    // Coercers declared as inner classes work too.
    @TestWith(value="a", coercers=InnerCoercer.class)
    public void innerCoercerTest(SampleType sampleType) {
        assertThat(sampleType.value).isEqualTo("a");
    }

    @Coercion
    public SampleType2 toSampleType2(String input) {
        return new SampleType2(input);
    }

    public static class InnerCoercer {
        public SampleType toSampleType(String input) {
            return new SampleType(input);
        }
    }
}
|
using FluentNHibernate.Automapping;
using FluentNHibernate.Automapping.Alterations;
using designPatterns.Domain.Entities;
namespace designPatterns.Data
{
    /// <summary>
    /// Auto-mapping override for <see cref="UserLoginSession"/>.
    /// Intentionally empty: the FluentNHibernate auto-mapping conventions are
    /// accepted as-is; the class exists as the registered override hook.
    /// </summary>
    public class UserLoginSessionAutoMappingOverride : IAutoMappingOverride<UserLoginSession>
    {
        public void Override(AutoMapping<UserLoginSession> mapping)
        {
            // No customizations; defaults apply.
        }
    }
}
|
# Copyright (c) 2015, Javier Gonzalez
# Copyright (c) 2015, the GPy Authors (see GPy AUTHORS.txt)
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from .bayesian_optimization import BayesianOptimization
class autoTune(BayesianOptimization):
    def __init__(self, f, bounds=None, max_iter=None, eps=None, n_procs=1, report_file=None):
        '''
        Automatic parameter tuner for computer models based on the GPyOpt class
        BayesianOptimization. Note that:
        (1) The function is mainly tuned to provide accurate results, so it may be
            slow on expensive examples.
        (2) It is recommended to use as many processors as are available for the
            optimization. This may significantly improve the results.
        (3) This function depends on DIRECT, which is used to optimize the
            acquisition function.

        :param f: the function to optimize. Should get a nxp numpy array as input and return a nx1 numpy array.
        :param bounds: Tuple containing the box constrains of the function to optimize. Example: for [0,1]x[0,1] insert [(0,1),(0,1)].
        :param max_iter: exploration horizon, or number of acquisitions. If nothing is provided, 15 * input_dim is used.
        :param eps: minimum distance between two consecutive x's to keep running the model.
        :param n_procs: number of CPUs to use in computation. Set by default equal to the size of the batches collected.
        :param report_file: name of the file in which the results of the optimization are saved.
        '''
        input_dim = len(bounds)

        # ---- Initial number of data points
        self.model_data_init = 5 * input_dim

        # ---- Maximum number of iterations
        if max_iter is None:
            max_iter = 15 * input_dim

        # ---- Parallel computation
        n_inbatch = n_procs

        # ---- Tolerance (fix: resolve the default BEFORE the optimization
        # run, so run_optimization receives 1e-6 instead of None)
        if eps is None:
            self.eps = 1e-6
        else:
            self.eps = eps

        # ---- File to save the report (same fix as eps above)
        if report_file is None:
            self.report_file = 'GPyOpt-results.txt'
        else:
            self.report_file = report_file

        # ----- Assign super class
        super(autoTune, self).__init__(f, bounds, exact_feval=True)

        # ---- Run optimization. Customized for automatic tuning. Reinforces
        # the acquisition with a random location.
        self.run_optimization(max_iter=max_iter,
                              n_inbatch=n_inbatch,
                              acqu_optimize_method='DIRECT',
                              acqu_optimize_restarts=200,
                              batch_method='random',
                              eps=self.eps,
                              n_procs=n_procs,
                              true_gradients=True,
                              save_interval=5,
                              report_file=self.report_file,
                              verbose=True)
|
#include "mmdb-extras.h"
#include "mmdb.h"
#include "mmdb-crystal.h"
#include "Bond_lines.h"
// Read a phenix .geo bond file (arg 1, default "1yjp.geo"); with
// "test-coords" as arg 2, also load 1yjp.pdb, dump its atom specs and build
// a Bond_lines_container from the model + geometry bonds.
int main(int argc, char **argv) {

   std::string file_name = "1yjp.geo";
   if (argc > 1)
      file_name = argv[1];
   coot::phenix_geo_bonds pgb(file_name);

   bool test_coords = false;
   if (argc > 2) {
      std::string t(argv[2]);
      if (t == "test-coords") {
         test_coords = true;
      }
   }

   if (test_coords) {
      // NOTE(review): mol is never deleted here; presumably acceptable for a
      // short-lived test main, and Bond_lines_container may reference it —
      // confirm ownership before adding a delete.
      mmdb::Manager *mol = new mmdb::Manager;
      int ierr = mol->ReadCoorFile("1yjp.pdb");
      std::cout << "print ierr: " << ierr << std::endl;
      if (ierr != mmdb::Error_NoError) {
         std::cout << "Problem reading pdb file " << std::endl;
      } else {
         if (false) { // debug dump of every atom spec, disabled by default
            int imod = 1;
            mmdb::Model *model_p = mol->GetModel(imod);
            if (! model_p) {
               std::cout << "Null model" << std::endl;
            } else {
               mmdb::Chain *chain_p;
               int n_chains = model_p->GetNumberOfChains();
               for (int ichain=0; ichain<n_chains; ichain++) {
                  chain_p = model_p->GetChain(ichain);
                  if (! chain_p) {
                     std::cout << "Null chain" << std::endl;
                  } else {
                     int nres = chain_p->GetNumberOfResidues();
                     mmdb::Residue *residue_p;
                     mmdb::Atom *at;
                     for (int ires=0; ires<nres; ires++) {
                        residue_p = chain_p->GetResidue(ires);
                        if (! residue_p) {
                           std::cout << "Null residue " << std::endl;
                        } else {
                           int n_atoms = residue_p->GetNumberOfAtoms();
                           // std::cout << "residue has " << n_atoms << " atoms " << std::endl;
                           for (int iat=0; iat<n_atoms; iat++) {
                              at = residue_p->GetAtom(iat);
                              std::cout << "   " << iat << " " << coot::atom_spec_t(at) << std::endl;
                           }
                        }
                     }
                  }
               }
            }
         }
         Bond_lines_container blc(mol, pgb);
      }
   }
   return 0;
}
|
<!doctype html>
<html lang="en">
<head>
<title>Code coverage report for express-stormpath/lib/helpers/get-required-registration-fields.js</title>
<meta charset="utf-8" />
<link rel="stylesheet" href="../../../prettify.css" />
<link rel="stylesheet" href="../../../base.css" />
<meta name="viewport" content="width=device-width, initial-scale=1">
<style type='text/css'>
.coverage-summary .sorter {
background-image: url(../../../sort-arrow-sprite.png);
}
</style>
</head>
<body>
<div class='wrapper'>
<div class='pad1'>
<h1>
<a href="../../../index.html">all files</a> / <a href="index.html">express-stormpath/lib/helpers/</a> get-required-registration-fields.js
</h1>
<div class='clearfix'>
<div class='fl pad1y space-right2'>
<span class="strong">100% </span>
<span class="quiet">Statements</span>
<span class='fraction'>11/11</span>
</div>
<div class='fl pad1y space-right2'>
<span class="strong">91.67% </span>
<span class="quiet">Branches</span>
<span class='fraction'>11/12</span>
</div>
<div class='fl pad1y space-right2'>
<span class="strong">100% </span>
<span class="quiet">Functions</span>
<span class='fraction'>3/3</span>
</div>
<div class='fl pad1y space-right2'>
<span class="strong">100% </span>
<span class="quiet">Lines</span>
<span class='fraction'>11/11</span>
</div>
</div>
</div>
<div class='status-line high'></div>
<pre><table class="coverage">
<tr><td class="line-count quiet">1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37</td><td class="line-coverage quiet"><span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-yes">34×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">34×</span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">33×</span>
<span class="cline-any cline-yes">222×</span>
<span class="cline-any cline-yes">122×</span>
<span class="cline-any cline-yes">122×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">222×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">33×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span></td><td class="text"><pre class="prettyprint lang-js">'use strict';
var async = require('async');
/**
* @private
* @callback getRequiredRegistrationFieldsCallback
* @param {Array} fields - An array of required field names (as strings). Might
* be empty if the user explicitly disables all fields in the configuration.
*/
/**
* Gets a list of required registration fields.
*
* @param {Object} config - The Stormpath Configuration object.
* @param {getRequiredRegistrationFieldsCallback} callback - The callback to
* run.
*/
module.exports = function (config, callback) {
var fields = [];
if (!config || !config.web || !config.web.register) {
return callback([]);
}
async.forEachOf(config.web.register.form.fields || <span class="branch-1 cbranch-no" title="branch not covered" >{},</span> function (field, fieldName, cb) {
if (field && field.enabled && field.required) {
field.name = fieldName;
fields.push(field);
}
cb();
}, function () {
callback(fields);
});
};
</pre></td></tr>
</table></pre>
<div class='push'></div><!-- for sticky footer -->
</div><!-- /wrapper -->
<div class='footer quiet pad2 space-top1 center small'>
Code coverage
generated by <a href="http://istanbul-js.org/" target="_blank">istanbul</a> at Tue Mar 29 2016 15:50:23 GMT-0700 (PDT)
</div>
</div>
<script src="../../../prettify.js"></script>
<script>
window.onload = function () {
if (typeof prettyPrint === 'function') {
prettyPrint();
}
};
</script>
<script src="../../../sorter.js"></script>
</body>
</html>
|
<?xml version="1.0" encoding="iso-8859-1"?>
<!DOCTYPE html
PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<!--
Methods
-->
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<title>Methods</title>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1" />
<link rel="stylesheet" href="rdoc-style.css" type="text/css" />
<base target="docwin" />
</head>
<body>
<div id="index">
<h1 class="section-bar">Methods</h1>
<div id="index-entries">
<a href="classes/Greybox.html#M000005">greybox_advance_link_to_page (Greybox)</a><br />
<a href="classes/Greybox.html#M000001">greybox_head (Greybox)</a><br />
<a href="classes/Greybox.html#M000002">greybox_link_to_image (Greybox)</a><br />
<a href="classes/Greybox.html#M000003">greybox_link_to_page (Greybox)</a><br />
<a href="classes/Greybox.html#M000004">greybox_links (Greybox)</a><br />
</div>
</div>
</body>
</html>
|
/* Switch inputs (active low) and stepper-driver control outputs. */
#define SW1 RB0_bit
#define SW2 RB1_bit
#define SW3 RB2_bit
#define SW4 RB3_bit
#define ENABLE RC5_bit  /* driver enable, active low: 0 = motor enabled */
#define DIR RC6_bit     /* rotation direction: 1 = CW, 0 = CCW (per usage below) */
#define PULSE RC2_bit   /* step pulse output */
void dynamic_delay_ms(unsigned int delay);
/* Poll four switches and drive a stepper accordingly:
 * SW1/SW2 select 250 Hz CW/CCW, SW3/SW4 select 500 Hz CW/CCW;
 * with no switch pressed the driver is disabled and no delay runs. */
void main()
{
    unsigned int frequency=0;  /* step frequency in Hz; 0 means stopped */
    /* Switch pins as inputs */
    TRISB0_bit=1;
    TRISB1_bit=1;
    TRISB2_bit=1;
    TRISB3_bit=1;
    /* Driver control pins as outputs */
    TRISC5_bit=0;
    TRISC6_bit=0;
    TRISC2_bit=0;
    ENABLE=1; //disable motor
    while(1)
    {
        if(SW1==0)
        {
            frequency=250; //set motor speed to 250Hz
            DIR=1; //set motor direction to CW
            ENABLE=0; //enable motor
        }
        else if(SW2==0)
        {
            frequency=250; //set motor speed to 250Hz
            DIR=0; //set motor direction to CCW
            ENABLE=0; //enable motor
        }
        else if(SW3==0)
        {
            frequency=500; //set motor speed to 500Hz
            DIR=1; //set motor direction to CW
            ENABLE=0; //enable motor
        }
        else if(SW4==0)
        {
            frequency=500; //set motor speed to 500Hz
            DIR=0; //set motor direction to CCW
            ENABLE=0; //enable motor
        }
        else
        {
            frequency=0; //stop: no switch pressed (comment previously said 800Hz, which was wrong)
            ENABLE=1; //disable motor
        }
        /* Emit one (very narrow) step pulse, then wait one full period.
         * NOTE(review): the pulse is only a few instruction cycles wide and
         * 1000/frequency is the whole period in ms — confirm the driver
         * accepts this pulse shape. */
        PULSE=1;
        PULSE=0;
        if(frequency>0)dynamic_delay_ms(1000/frequency);
    }
}
/* Busy-wait for *delay* milliseconds, looping over the compiler's
 * Delay_Ms(1) primitive (which only accepts a compile-time constant). */
void dynamic_delay_ms(unsigned int delay)
{
    for( ;delay>0;delay-=1)Delay_Ms(1);
}
|
# -*- coding: utf-8 -*-
"""
algorithms.segment.trained_model
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
An API for a trained segmentation model to predict nodule boundaries and
descriptive statistics.
"""
import os
import numpy as np
import scipy.ndimage
from keras.models import load_model
from ...algorithms.segment.src.model import dice_coef_loss, dice_coef
from ...algorithms.segment.src.training import get_best_model_path, get_data_shape
from ...preprocess.load_ct import load_ct, MetaData
def predict(dicom_path, centroids):
    """ Predicts nodule boundaries.
    Given a path to DICOM images and a list of centroids
    (1) load the segmentation model from its serialized state
    (2) pre-process the dicom data into whatever format the segmentation
    model expects
    (3) for each pixel create an indicator 0 or 1 of if the pixel is
    cancerous
    (4) write this binary mask to disk, and return the path to the mask
    Args:
        dicom_path (str): a path to a DICOM directory
        centroids (list[dict]): A list of centroids of the form::
            {'x': int,
             'y': int,
             'z': int}
    Returns:
        dict: Dictionary containing path to serialized binary masks and
        volumes per centroid with form::
            {'binary_mask_path': str,
             'volumes': list[float]}
    """
    # meta is unused here; load_ct returns a (voxel_data, meta) tuple.
    voxel_data, meta = load_ct(dicom_path)
    # dice_coef/dice_coef_loss are custom metrics, so Keras must be told
    # about them to deserialize the model.
    model = load_model(get_best_model_path(), custom_objects={'dice_coef_loss': dice_coef_loss, 'dice_coef': dice_coef})
    x, y, z, channels = get_data_shape()
    input_data = np.ndarray((1, x, y, z, channels))  # batch, x, y, z, channels
    # Crop the input data to the required data shape and pad with zeros
    padded_data = np.zeros_like(input_data)
    min_x, min_y, min_z = min(x, voxel_data.shape[0]), min(y, voxel_data.shape[1]), min(z, voxel_data.shape[2])
    padded_data[0, :min_x, :min_y, :min_z, 0] = voxel_data[:min_x, :min_y, :min_z]
    input_data = padded_data
    output_data = model.predict(input_data)
    # Persist the predicted mask next to this module; callers read it back
    # through the returned path.
    segment_path = os.path.join(os.path.dirname(__file__), 'assets', "lung-mask.npy")
    np.save(segment_path, output_data[0, :, :, :, 0])
    volumes = calculate_volume(segment_path, centroids)
    return {
        'binary_mask_path': segment_path,
        'volumes': volumes
    }
def calculate_volume(segment_path, centroids, ct_path=None):
    """Compute per-centroid volumes of connected components in a mask.

    Loads the serialized binary mask, labels its connected components,
    and for each centroid returns the voxel count of the component that
    centroid falls in.  If *ct_path* is given, counts are scaled by the
    CT voxel spacing so the result is in cubic mm.

    Args:
        segment_path (str): path to the serialized mask (.npy file).
        centroids (list[dict]): dicts with integer 'x', 'y', 'z' keys.
        ct_path (str): path to the folder containing the dcm-files of a
            series; when None, volumes are returned in voxels.

    Returns:
        list[float]: one volume per centroid (a centroid on background
        yields the background voxel count — label 0).
    """
    labelled, _ = scipy.ndimage.label(np.load(segment_path))
    # Component label under each centroid (0 means background).
    component_ids = [labelled[c['x'], c['y'], c['z']] for c in centroids]
    # bincount yields the voxel count of every label in a single pass.
    counts = np.bincount(labelled.flatten())
    volumes = counts[component_ids].tolist()
    if ct_path:
        # Convert voxel counts into cubic mm via the DICOM voxel spacing.
        spacing = np.prod(MetaData(load_ct(ct_path, voxel=False)).spacing)
        volumes = [spacing * v for v in volumes]
    return volumes
|
# common operations for odinclean and odinnormalize
import re
from xigt import Item
PRITAGS = ('L','G','T','L-G','L-T','G-T','L-G-T','M','B','C')
SECTAGS = ('AC','AL','CN','CR','DB','EX','LN','LT','SY')
def copy_items(items):
    """Return new xigt Item objects mirroring each item in *items*."""
    copies = []
    for original in items:
        copies.append(Item(id=original.id, type=original.type,
                           attributes=original.attributes,
                           text=original.text))
    return copies
def get_tags(item):
    """Split *item*'s 'tag' attribute on '+'; missing tag yields ['']."""
    tag_value = item.attributes.get('tag', '')
    return tag_value.split('+')
def remove_blank_items(items):
    """Drop items whose text is None, empty, or whitespace-only."""
    kept = []
    for item in items:
        text = item.text or ''
        if text.strip():
            kept.append(item)
    return kept
def min_indent(items, tags=None):
    """Return the smallest leading-whitespace width among items whose
    primary tag is in *tags* (default PRITAGS; 'M' and 'B' are always
    excluded). Returns 0 when no item qualifies."""
    if tags is None:
        tags = PRITAGS
    wanted = set(tags) - {'M', 'B'}
    indents = [
        re.match(r'\s*', item.text, re.U).end()
        for item in items
        if get_tags(item)[0] in wanted
    ]
    return min(indents) if indents else 0
def shift_left(items, tags=None):
    """Strip the common leading indentation from items whose primary tag
    is in *tags* (default PRITAGS): 'M' lines are fully stripped, 'B'
    lines are left untouched. Mutates and returns *items*."""
    if tags is None:
        tags = PRITAGS
    selected = set(tags) - {'M', 'B'}
    shift = min_indent(items, selected)
    for item in items:
        primary = get_tags(item)[0]
        if primary == 'M':
            item.text = item.text.strip()
        elif primary in selected:
            item.text = item.text[shift:]
    return items
|
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.12"/>
<meta name="viewport" content="width=device-width, initial-scale=1"/>
<title>mbed utilities: Class Members - Variables</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="search/searchdata.js"></script>
<script type="text/javascript" src="search/search.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td id="projectalign" style="padding-left: 0.5em;">
<div id="projectname">mbed utilities
</div>
<div id="projectbrief">Utilities that can be added to an mbed project using "mbed add [email protected]:wdwalker/wdwalker-mbed"</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.12 -->
<script type="text/javascript">
var searchBox = new SearchBox("searchBox", "search",false,'Search');
</script>
<script type="text/javascript" src="menudata.js"></script>
<script type="text/javascript" src="menu.js"></script>
<script type="text/javascript">
$(function() {
initMenu('',true,false,'search.php','Search');
$(document).ready(function() { init_search(); });
});
</script>
<div id="main-nav"></div>
</div><!-- top -->
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
onkeydown="return searchBox.OnSearchSelectKey(event)">
</div>
<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0"
name="MSearchResults" id="MSearchResults">
</iframe>
</div>
<div class="contents">
 <ul>
<li>_spi
: <a class="el" href="classwdwalker_1_1mbed_1_1peripherals_1_1_c12832.html#add025c93766493e3c8ea7074a5efd0a2">wdwalker::mbed::peripherals::C12832</a>
</li>
</ul>
</div><!-- contents -->
<!-- start footer part -->
<hr class="footer"/><address class="footer"><small>
Generated by  <a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/>
</a> 1.8.12
</small></address>
</body>
</html>
|
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
var gTestfile = 'regress-349023-02.js';
//-----------------------------------------------------------------------------
var BUGNUMBER = 349023;
var summary = 'Bogus JSCLASS_IS_EXTENDED in the generator class';
var actual = 'No Crash';
var expect = 'No Crash';
//-----------------------------------------------------------------------------
test();
//-----------------------------------------------------------------------------
// Regression driver for bug 349023: touching a property on a generator
// object must not crash the engine.
function test()
{
  enterFunc ('test');
  printBugNumber(BUGNUMBER);
  printStatus (summary);
  // Legacy JS1.7 generator syntax (bare `yield` in a function expression);
  // only runs under the old SpiderMonkey shell, not standard ECMAScript.
  var gen = (function() { yield 3; })();
  gen.foopy;
  reportCompare(expect, actual, summary);
  exitFunc ('test');
}
|
#!/usr/local/bin/python
#CHIPSEC: Platform Security Assessment Framework
#Copyright (c) 2010-2015, Intel Corporation
#
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; Version 2.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#Contact information:
#[email protected]
#
"""
The msr command allows direct access to read and write MSRs.
"""
__version__ = '1.0'
from chipsec.command import BaseCommand
from chipsec.hal.msr import Msr
# CPU Model Specific Registers
class MSRCommand(BaseCommand):
    """
    >>> chipsec_util msr <msr> [eax] [edx] [cpu_id]
    Examples:
    >>> chipsec_util msr 0x3A
    >>> chipsec_util msr 0x8B 0x0 0x0 0
    """
    # NOTE: Python 2 module (uses the print statement in run()).
    # The docstring above is user-facing help printed at runtime.

    def requires_driver(self):
        # No driver required when printing the util documentation
        if len(self.argv) < 3:
            return False
        return True

    def run(self):
        # With no MSR address given, just show the usage docstring.
        if len(self.argv) < 3:
            print MSRCommand.__doc__
            return
        #msr = Msr( os_helper )
        # All numeric command-line arguments are parsed as hexadecimal.
        msr_addr = int(self.argv[2],16)
        if (3 == len(self.argv)):
            # Read the MSR on every CPU thread.
            for tid in range(self.cs.msr.get_cpu_thread_count()):
                (eax, edx) = self.cs.msr.read_msr( tid, msr_addr )
                val64 = ((edx << 32) | eax)
                self.logger.log( "[CHIPSEC] CPU%d: RDMSR( 0x%x ) = %016X (EAX=%08X, EDX=%08X)" % (tid, msr_addr, val64, eax, edx) )
        elif (4 == len(self.argv)):
            # Read the MSR on a single CPU thread.
            cpu_thread_id = int(self.argv[3], 16)
            (eax, edx) = self.cs.msr.read_msr( cpu_thread_id, msr_addr )
            val64 = ((edx << 32) | eax)
            self.logger.log( "[CHIPSEC] CPU%d: RDMSR( 0x%x ) = %016X (EAX=%08X, EDX=%08X)" % (cpu_thread_id, msr_addr, val64, eax, edx) )
        else:
            # Write: argv[3]/argv[4] are the EAX/EDX halves of the value.
            eax = int(self.argv[3], 16)
            edx = int(self.argv[4], 16)
            val64 = ((edx << 32) | eax)
            if (5 == len(self.argv)):
                # No cpu_id given: write to every CPU thread.
                self.logger.log( "[CHIPSEC] All CPUs: WRMSR( 0x%x ) = %016X" % (msr_addr, val64) )
                for tid in range(self.cs.msr.get_cpu_thread_count()):
                    self.cs.msr.write_msr( tid, msr_addr, eax, edx )
            elif (6 == len(self.argv)):
                cpu_thread_id = int(self.argv[5], 16)
                self.logger.log( "[CHIPSEC] CPU%d: WRMSR( 0x%x ) = %016X" % (cpu_thread_id, msr_addr, val64) )
                self.cs.msr.write_msr( cpu_thread_id, msr_addr, eax, edx )
commands = { 'msr': MSRCommand }
|
package com.staircaselabs.test;
import java.util.List;
// Fixture exercising uncuddled / single-line brace placement styles.
public class SomeClass {

    private void doUncuddled( List<String> list ) {
        for( String str : list )
        {
            System.out.println( "one" );
        }
    }

    private void doUncuddledWithComments( List<String> list ) {
        for( String str : list )// comment 1
        // comment 2
        { // comment 3
            // comment 4
            System.out.println( "two" ); // comment 5
        }
    }

    private void doUncuddledNestedWithComments( List<String> list ) {
        for( String str : list ) // comment 1
        { // comment 2
            System.out.println( "five" );
            // FIX: inner loop variable renamed from "str" — Java forbids
            // redeclaring a local variable visible from an enclosing scope,
            // so the original nested "String str" did not compile.
            for( String inner : list )
            // comment 3
            {
                System.out.println( "six" );
            }
        }
    }

    private void doSingleLine( List<String> list ) {
        for( String str : list ){System.out.println( "one" );}
    }
}
|
//* This file is part of the MOOSE framework
//* https://www.mooseframework.org
//*
//* All rights reserved, see COPYRIGHT for full restrictions
//* https://github.com/idaholab/moose/blob/master/COPYRIGHT
//*
//* Licensed under LGPL 2.1, please see LICENSE for details
//* https://www.gnu.org/licenses/lgpl-2.1.html
#include "ScalarKernel.h"
#include "MooseVariableScalar.h"
defineLegacyParams(ScalarKernel);
// Build the parameter set for ScalarKernel; currently adds nothing
// beyond what ScalarKernelBase declares.
InputParameters
ScalarKernel::validParams()
{
  InputParameters params = ScalarKernelBase::validParams();
  return params;
}
// Bind _u to the current scalar solution for implicit schemes, or to the
// old solution for explicit ones.
ScalarKernel::ScalarKernel(const InputParameters & parameters)
  : ScalarKernelBase(parameters), _u(_is_implicit ? _var.sln() : uOld())
{
}
|
import { enableProdMode } from '@angular/core';
import { platformBrowserDynamic } from '@angular/platform-browser-dynamic';
import { AppModule } from './app/app.module.2';
import { environment } from './environments/environment';
// Enable Angular production optimizations outside of dev builds.
if (environment.production) {
  enableProdMode();
}

// Bootstrap the root module; log bootstrap failures instead of leaving
// an unhandled promise rejection (bootstrapModule returns a Promise).
platformBrowserDynamic().bootstrapModule(AppModule)
  .catch(err => console.error(err));
|
/****************************************************************************
**
** Copyright (C) 2014 Klaralvdalens Datakonsult AB (KDAB).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt3D module of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL3$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 3 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPLv3 included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 3 requirements
** will be met: https://www.gnu.org/licenses/lgpl.html.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 2.0 or later as published by the Free
** Software Foundation and appearing in the file LICENSE.GPL included in
** the packaging of this file. Please review the following information to
** ensure the GNU General Public License version 2.0 requirements will be
** met: http://www.gnu.org/licenses/gpl-2.0.html.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qpostman_p.h"
#include <private/qobject_p.h>
#include <Qt3DCore/qscenepropertychange.h>
#include <Qt3DCore/qbackendscenepropertychange.h>
#include <Qt3DCore/private/qscene_p.h>
#include <Qt3DCore/private/qlockableobserverinterface_p.h>
#include <Qt3DCore/qnode.h>
#include <Qt3DCore/private/qnode_p.h>
QT_BEGIN_NAMESPACE
namespace Qt3D {
// Private data for QPostman: the scene used to resolve node ids, and the
// batch of pending frontend->backend changes flushed once per event-loop
// turn by submitChangeBatch().
class QPostmanPrivate : public QObjectPrivate
{
public:
    QPostmanPrivate()
        : QObjectPrivate()
        , m_scene(Q_NULLPTR)
    {
    }

    Q_DECLARE_PUBLIC(QPostman)
    QScene *m_scene;                      // not owned
    std::vector<QSceneChangePtr> m_batch; // changes queued by notifyBackend()
};
QPostman::QPostman(QObject *parent)
    : QObject(*new QPostmanPrivate, parent)
{
    // Register the shared-pointer type so it can travel through queued
    // meta-object invocations (see sceneChangeEvent/notifyBackend).
    qRegisterMetaType<QSceneChangePtr >("QSceneChangePtr");
}
// Set the scene used to look up frontend nodes (pointer is not owned).
void QPostman::setScene(QScene *scene)
{
    Q_D(QPostman);
    d->m_scene = scene;
}
// Resolve the QPostman::notifyFrontendNode meta-method once; the caller
// caches the result in a function-local static.
static inline QMetaMethod notifyFrontendNodeMethod()
{
    int idx = QPostman::staticMetaObject.indexOfMethod("notifyFrontendNode(QSceneChangePtr)");
    Q_ASSERT(idx != -1);
    return QPostman::staticMetaObject.method(idx);
}
// Forward a change to notifyFrontendNode via the meta-object system, so
// delivery happens in this object's thread regardless of the caller's.
void QPostman::sceneChangeEvent(const QSceneChangePtr &e)
{
    static const QMetaMethod notifyFrontendNode = notifyFrontendNodeMethod();
    notifyFrontendNode.invoke(this, Q_ARG(QSceneChangePtr, e));
}
/*!
* This will start or append \a change to a batch of changes from frontend
* nodes. Once the batch is complete, when the event loop returns, the batch is
* sent to the QChangeArbiter to notify the backend aspects.
*/
void QPostman::notifyBackend(const QSceneChangePtr &change)
{
    // If batch in progress
    //   add change
    // otherwise start batch
    //   by calling a queued slot
    // The queued invocation only runs once the event loop spins, so every
    // change made in between ends up in the same batch.
    Q_D(QPostman);
    if (d->m_batch.empty())
        QMetaObject::invokeMethod(this, "submitChangeBatch", Qt::QueuedConnection);
    d->m_batch.push_back(change);
}
// Deliver a backend property change to the matching frontend QNode,
// looked up by target id in the scene. Silently ignored when the change
// is not a backend change, no scene is set, or the node is unknown.
void QPostman::notifyFrontendNode(const QSceneChangePtr &e)
{
    Q_D(QPostman);
    QBackendScenePropertyChangePtr change = qSharedPointerDynamicCast<QBackendScenePropertyChange>(e);
    if (!change.isNull() && d->m_scene != Q_NULLPTR) {
        QNode *n = d->m_scene->lookupNode(change->targetNode());
        if (n != Q_NULLPTR)
            n->sceneChangeEvent(change);
    }
}
// Flush the accumulated change batch to the arbiter under its lock.
// No-op (batch kept) while there is no scene or no arbiter yet.
void QPostman::submitChangeBatch()
{
    Q_D(QPostman);
    QLockableObserverInterface *arbiter = Q_NULLPTR;
    if (d->m_scene && (arbiter = d->m_scene->arbiter()) != Q_NULLPTR) {
        arbiter->sceneChangeEventWithLock(d->m_batch);
        d->m_batch.clear();
    }
}
} //Qt3D
QT_END_NAMESPACE
|
// Copyright 2019 Proyectos y Sistemas de Mantenimiento SL (eProsima).
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*!
* @file Mutex.cpp
*
*/
#include "TMutex.hpp"
#include <array>
#include <algorithm>
#include <cassert>
// TODO: verify it only blocks once and never again after a timeout.
// TODO: if the second one blocks, the later ones must not block.
using namespace eprosima::fastrtps;
namespace eprosima {
namespace fastrtps {
std::atomic<pid_t> g_tmutex_thread_pid(0);
int (*g_origin_lock_func)(pthread_mutex_t*){nullptr};
int (*g_origin_timedlock_func)(pthread_mutex_t*, const struct timespec*){nullptr};
typedef struct
{
LockType type;
pthread_mutex_t* mutex;
uint32_t count;
} tmutex_record;
constexpr size_t g_tmutex_records_max_length = 30;
std::array<tmutex_record, g_tmutex_records_max_length> g_tmutex_records{{{LockType::LOCK, nullptr, 0}}};
int32_t g_tmutex_records_end = -1;
// Return the index of *mutex* in the recording table, or -1 when absent.
int32_t tmutex_find_record(pthread_mutex_t* mutex)
{
    for (int32_t idx = 0; idx <= g_tmutex_records_end; ++idx)
    {
        if (g_tmutex_records[idx].mutex == mutex)
        {
            return idx;
        }
    }
    return -1;
}
} //namespace fastrtps
} //namespace eprosima
// Begin a mutex-recording session for the calling thread.
// Pre: no session is active (g_tmutex_thread_pid == 0).
void eprosima::fastrtps::tmutex_start_recording()
{
    assert(0 == g_tmutex_thread_pid);
    g_tmutex_thread_pid = GET_TID();
    // Reset the table to a known empty state.
    g_tmutex_records = {{{LockType::LOCK, nullptr, 0}}};
    g_tmutex_records_end = -1;
}
// End the active recording session. Pre: a session is active.
void eprosima::fastrtps::tmutex_stop_recording()
{
    assert(0 < g_tmutex_thread_pid);
    g_tmutex_thread_pid = 0;
}
// Record one lock operation of *type* on *mutex*: create a table entry
// on first sight, then bump its use count. Pre: a session is active.
void eprosima::fastrtps::tmutex_record_mutex_(LockType type, pthread_mutex_t* mutex)
{
    assert(0 < g_tmutex_thread_pid);
    // Search if mutex already has an entry.
    int32_t position = tmutex_find_record(mutex);
    if (-1 >= position)
    {
        // Not found: append, asserting the fixed-size table is not full.
        assert(g_tmutex_records_max_length > size_t(g_tmutex_records_end + 1));
        position = ++g_tmutex_records_end;
        g_tmutex_records[position].type = type;
        g_tmutex_records[position].mutex = mutex;
    }
    ++g_tmutex_records[position].count;
}
// Number of distinct mutexes recorded. Pre: recording has stopped.
size_t eprosima::fastrtps::tmutex_get_num_mutexes()
{
    assert(0 == g_tmutex_thread_pid);
    return g_tmutex_records_end + 1;
}
size_t eprosima::fastrtps::tmutex_get_num_lock_type()
{
size_t counter = 0;
if(-1 < g_tmutex_records_end)
{
std::for_each(g_tmutex_records.begin(), g_tmutex_records.begin() + g_tmutex_records_end + 1,
[&](const tmutex_record& record)
{
if(record.type == LockType::LOCK)
{
++counter;
}
});
}
return counter;
}
size_t eprosima::fastrtps::tmutex_get_num_timedlock_type()
{
size_t counter = 0;
if(-1 < g_tmutex_records_end)
{
std::for_each(g_tmutex_records.begin(), g_tmutex_records.begin() + g_tmutex_records_end + 1,
[&](const tmutex_record& record)
{
if(record.type == LockType::TIMED_LOCK)
{
++counter;
}
});
}
return counter;
}
// Return the recorded mutex at *index* (0-based; must be in range).
pthread_mutex_t* eprosima::fastrtps::tmutex_get_mutex(const size_t index)
{
    assert(index <= size_t(g_tmutex_records_end));
    return g_tmutex_records[index].mutex;
}
// Lock the recorded mutex at *index* through the original (un-hooked)
// pthread lock function, when that function pointer has been captured.
void eprosima::fastrtps::tmutex_lock_mutex(const size_t index)
{
    assert(index <= size_t(g_tmutex_records_end));
    if(g_origin_lock_func != nullptr)
    {
        (*g_origin_lock_func)(g_tmutex_records[index].mutex);
    }
}
// Unlock the recorded mutex at *index* (plain pthread unlock).
void eprosima::fastrtps::tmutex_unlock_mutex(const size_t index)
{
    assert(index <= size_t(g_tmutex_records_end));
    pthread_mutex_unlock(g_tmutex_records[index].mutex);
}
|
# Copyright 2016 Casey Jaymes
# This file is part of PySCAP.
#
# PySCAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PySCAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PySCAP. If not, see <http://www.gnu.org/licenses/>.
import logging
from scap.model.xhtml import *
from scap.Model import Model
logger = logging.getLogger(__name__)
class DlTag(Model):
    # XHTML <dl> (definition list) element model: a sequence of <dt>/<dd>
    # children plus the common XHTML attribute group.
    # NOTE(review): both 'dt' and 'dd' map to class 'LiTag' — looks like a
    # copy/paste from the list models; confirm dedicated Dt/Dd classes were
    # not intended.
    MODEL_MAP = {
        'elements': [
            {'tag_name': 'dt', 'list': '_elements', 'class': 'LiTag', 'max': None},
            {'tag_name': 'dd', 'list': '_elements', 'class': 'LiTag', 'max': None},
        ],
        'attributes': {},
    }
    MODEL_MAP['attributes'].update(ATTRIBUTE_GROUP_attrs)
|
<?php
/**
* Author: Nil Portugués Calderó <[email protected]>
* Date: 12/18/15
* Time: 11:36 PM.
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace NilPortugues\SchemaOrg\Properties;
use NilPortugues\SchemaOrg\SchemaProperty;
/**
* An identifier for the method of payment used (e.g. the last 4 digits of the credit card).
*/
class PaymentMethodIdProperty extends SchemaProperty
{
    /** Canonical schema.org URL for this property. */
    const SCHEMA_URL = 'http://schema.org/paymentMethodId';

    /** Property name as used in schema.org markup. */
    const PROPERTY_NAME = 'paymentMethodId';

    /**
     * A list of schemas allowed to use this property.
     *
     * @var array
     */
    protected static $allowedSchemas = [
        'http://schema.org/Order',
        'http://schema.org/Invoice',
    ];
}
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio
## Copyright (C) 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation,
## Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Upgrade for 'format'."""
from invenio.legacy.dbquery import run_sql
depends_on = ['invenio_release_1_1_0']
def info():
    """Return a short, human-readable summary of this upgrade."""
    summary = "Larger values allowed for format.code."
    return summary
def do_upgrade():
    """Perform upgrade.

    Widens format.code to varchar(20) so longer format codes fit.
    """
    run_sql("ALTER TABLE format MODIFY COLUMN code varchar(20);")
def estimate():
    """Estimate running time of this upgrade, in seconds (optional)."""
    return 1
|
/**
 * class JwtDecoder
 * A class for decoding a JWT into its claims and expiry information.
 */
export declare class JwtDecoder {
    /** True when the token could be decoded into claims. */
    isValid: boolean;
    /** Expiry derived from the token's expiration field. */
    expiryDate: Date;
    /** True when the token's expiry has passed. */
    isExpired: boolean;
    /** The decoded claims payload. */
    claims: any;
    private jwt;
    /**
     * Indicates whether the bearer token has expired.
     * @param {string} token The bearer token.
     * @return {boolean} A value indicating that the token has expired.
     */
    static bearerTokenExpired(token: string): boolean;
    constructor(jwt: string);
    /**
     * Decodes the token into the constituent claims.
     */
    private decodeToken();
    /**
     * Sets the expiry date based on the expiration in the JWT.
     */
    private readTokenExpirationDate();
    /**
     * Checks the token for expiry.
     * @param {number} offsetSeconds A number of seconds to use as an offset.
     */
    private checkTokenExpiry(offsetSeconds?);
}
|
/***************************************************************************
qgsundowidget.h
---------------------
begin : June 2009
copyright : (C) 2009 by Martin Dobias
email : wonder dot sk at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
#ifndef QGSUNDOWIDGET_H
#define QGSUNDOWIDGET_H
#include <QtCore/QVariant>
#include <QtGui/QAction>
#include <QtGui/QApplication>
#include <QtGui/QButtonGroup>
#include <QtGui/QDockWidget>
#include <QtGui/QGridLayout>
#include <QtGui/QPushButton>
#include <QtGui/QSpacerItem>
#include <QtGui/QWidget>
#include <QUndoView>
#include <QUndoStack>
class QgsMapCanvas;
class QgsMapLayer;
/**
* Class that handles the undo display for undo commands
*/
class QgsUndoWidget : public QDockWidget
{
    Q_OBJECT
  public:
    // UI elements created by setupUi()
    QWidget *dockWidgetContents;
    QGridLayout *gridLayout;
    QSpacerItem *spacerItem;
    QPushButton *undoButton;
    QPushButton *redoButton;
    QSpacerItem *spacerItem1;

    QgsUndoWidget( QWidget * parent, QgsMapCanvas* mapCanvas );

    // Builds the dock's child widgets and layout.
    void setupUi( QDockWidget *UndoWidget );

    // Re-applies translated strings to the UI.
    void retranslateUi( QDockWidget *UndoWidget );

    /**
     * Setting new undo stack for undo view
     */
    void setUndoStack( QUndoStack * undoStack );

    /**
     * Handles destroying of stack when active layer is changed
     */
    void destroyStack();

    /**
     * Access to dock's contents
     * @note added in 1.9
     */
    QWidget* dockContents() { return dockWidgetContents; }

  public slots:
    /**
     * Changes undo stack which is displayed by undo view
     */
    void layerChanged( QgsMapLayer * layer );

    /**
     * Slot to handle undo changed signal
     */
    void undoChanged( bool value );

    /**
     * Slot to handle redo changed signal
     */
    void redoChanged( bool value );

    /**
     * Slot to handle index changed signal
     */
    void indexChanged( int curIndx );

    /**
     * Undo operation called from button push
     */
    void undo();

    /**
     * Redo operation called from button push
     */
    void redo();

  signals:
    void undoStackChanged();

  private:
    QUndoView * mUndoView;
    QUndoStack * mUndoStack;
    QgsMapCanvas* mMapCanvas;  // not owned
    int mPreviousIndex;        // last observed undo-stack index
    int mPreviousCount;        // last observed undo-stack command count
};
#endif // QGSUNDOWIDGET_H
|
#
# Project Kimchi
#
# Copyright IBM, Corp. 2014
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import lxml.etree as ET
import os
import socket
import stat
import string
import urlparse
from lxml import objectify
from lxml.builder import E
from wok.exception import InvalidParameter, NotFoundError
from wok.plugins.kimchi.utils import check_url_path
BUS_TO_DEV_MAP = {'ide': 'hd', 'virtio': 'vd', 'scsi': 'sd'}
DEV_TYPE_SRC_ATTR_MAP = {'file': 'file', 'block': 'dev'}
def get_disk_xml(params):
    """Build libvirt <disk> XML from *params*; return (dev_name, xml).

    Overall shape:
    <disk type='file' device='cdrom'>
    <driver name='qemu' type='raw'/>
    [source XML according to src_type]
    <target dev='%(dev)s' bus='%(bus)s'/>
    <readonly/>
    </disk>
    """
    path = params['path']
    disk_type = params.get('disk', None)
    if disk_type is None:
        # Infer file/block/network type from the path when not given.
        disk_type = _get_disk_type(path) if len(path) > 0 else 'file'
    disk = E.disk(type=disk_type, device=params['type'])
    driver = E.driver(name='qemu', type=params['format'])
    if params['type'] != 'cdrom':
        driver.set('cache', 'none')
    if params.get('pool_type') == "netfs":
        driver.set("io", "native")
    disk.append(driver)

    # Get device name according to bus and index values
    dev = params.get('dev', (BUS_TO_DEV_MAP[params['bus']] +
                             string.lowercase[params.get('index', 0)]))
    disk.append(E.target(dev=dev, bus=params['bus']))
    if params.get('address'):
        # ide disk target id is always '0'
        disk.append(E.address(
            type='drive', controller=params['address']['controller'],
            bus=params['address']['bus'], target='0',
            unit=params['address']['unit']))

    if len(params['path']) == 0:
        # No media attached (e.g. empty cdrom): emit the disk with no <source>.
        return (dev, ET.tostring(disk, encoding='utf-8', pretty_print=True))

    if disk_type == 'network':
        """
        <source protocol='%(protocol)s' name='%(url_path)s'>
        <host name='%(hostname)s' port='%(port)s'/>
        </source>
        """
        output = urlparse.urlparse(params['path'])
        # Fall back to the scheme's well-known port when the URL has none.
        port = str(output.port or socket.getservbyname(output.scheme))
        source = E.source(protocol=output.scheme, name=output.path)
        source.append(E.host(name=output.hostname, port=port))
    else:
        """
        <source file='%(src)s' />
        """
        source = E.source()
        source.set(DEV_TYPE_SRC_ATTR_MAP[disk_type], params['path'])
    disk.append(source)
    return (dev, ET.tostring(disk, encoding='utf-8', pretty_print=True))
def _get_disk_type(path):
    """Classify *path* as 'network', 'file' or 'block'.

    Raises InvalidParameter for paths that are neither a reachable URL,
    an existing regular file, nor a block device.
    """
    if check_url_path(path):
        return 'network'
    if not os.path.exists(path):
        raise InvalidParameter("KCHVMSTOR0003E", {'value': path})

    # Check if path is a valid local path
    if os.path.isfile(path):
        return 'file'

    # Resolve symlinks before testing for a block device.
    r_path = os.path.realpath(path)
    if stat.S_ISBLK(os.stat(r_path).st_mode):
        return 'block'

    raise InvalidParameter("KCHVMSTOR0003E", {'value': path})
def get_device_node(dom, dev_name):
    """Return the <disk> node of domain *dom* whose target dev is *dev_name*.

    Raises NotFoundError when the domain has no such disk.
    """
    xml = dom.XMLDesc(0)
    devices = objectify.fromstring(xml).devices
    disk = devices.xpath("./disk/target[@dev='%s']/.." % dev_name)

    if not disk:
        raise NotFoundError("KCHVMSTOR0007E",
                            {'dev_name': dev_name,
                             'vm_name': dom.name()})

    return disk[0]
def get_vm_disk_info(dom, dev_name):
    """Return a dict describing the disk *dev_name* of domain *dom*.

    The dict has keys 'dev', 'path', 'type', 'format' and 'bus';
    'path' falls back to "" when the <source> element is missing or
    malformed.
    """
    # Retrieve disk xml and format return dict
    disk = get_device_node(dom, dev_name)
    if disk is None:
        return None

    path = ""
    try:
        source = disk.source
        if source is not None:
            src_type = disk.attrib['type']
            if src_type == 'network':
                # Network disks encode protocol/host/port/name in <source>.
                host = source.host
                path = (source.attrib['protocol'] + '://' +
                        host.attrib['name'] + ':' +
                        host.attrib['port'] + source.attrib['name'])
            else:
                path = source.attrib[DEV_TYPE_SRC_ATTR_MAP[src_type]]
    except Exception:
        # Best effort: a disk with an unreadable <source> reports "".
        # Narrowed from a bare "except:" so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        path = ""

    return {'dev': dev_name,
            'path': path,
            'type': disk.attrib['device'],
            'format': disk.driver.attrib['type'],
            'bus': disk.target.attrib['bus']}
def get_vm_disks(dom):
    """Map each disk/cdrom target device of domain *dom* to its bus type."""
    devices = objectify.fromstring(dom.XMLDesc(0)).devices
    media = devices.xpath("./disk[@device='disk']")
    media.extend(devices.xpath("./disk[@device='cdrom']"))
    return {node.target.attrib['dev']: node.target.attrib['bus']
            for node in media}
|
namespace ALS.Glance.Api.Security.Filters
{
/// <summary>
/// Base attribute for endpoints authenticated with the HTTP "Bearer"
/// scheme; all work is delegated to ApiAuthenticationAttribute.
/// </summary>
public abstract class BearerAuthenticationAttribute : ApiAuthenticationAttribute
{
    /// <summary>
    /// Creates a new BearerAuthenticationAttribute with Scheme="Bearer"
    /// </summary>
    protected BearerAuthenticationAttribute()
        : base("Bearer") { }
}
}
|
# plot rotation period vs orbital period
import os
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import glob
import re
from gyro import gyro_age
import teff_bv as tbv
import scipy.stats as sps
from calc_completeness import calc_comp
# np.set_printoptions(threshold=np.nan, linewidth=9)
# Global matplotlib styling applied to every figure this script produces.
# NOTE(review): 'text.fontsize' is not a valid rcParam in modern matplotlib
# (it became 'font.size') -- confirm the pinned matplotlib version accepts it.
plotpar = {'axes.labelsize': 18,
           'text.fontsize': 10,
           'legend.fontsize': 18,
           'xtick.labelsize': 18,
           'ytick.labelsize': 18,
           'text.usetex': True}
plt.rcParams.update(plotpar)
# Absolute location of the turnip project's data files on the author's machine.
DATA_DIR = "/Users/ruthangus/projects/turnip/turnip/data/"
def save_data(nbins):
    """Compute rotation-period point estimates from MCMC posterior samples.

    For each KOI result file, take the mode of an *nbins*-bin histogram of
    the ln-period samples as the point estimate, derive asymmetric 1-sigma
    uncertainties from the 16th/84th percentiles, and write the table to
    period_point_estimates.csv.
    """
    # Bug fix: a leading '/' in the second argument makes os.path.join
    # discard DATA_DIR entirely, so the original "/koi_results/*h5"
    # pattern never looked inside DATA_DIR at all.
    fnames = glob.glob(os.path.join(DATA_DIR, "koi_results/*h5"))
    koi, period, errp, errm, lnerrp, lnerrm = [], [], [], [], [], []
    for i, fn in enumerate(fnames):
        df = pd.read_hdf(fn, key="samples")
        # Histogram mode of the ln-period posterior = point estimate.
        phist, bins = np.histogram(df.ln_period.values, nbins)
        ln_p = bins[phist == max(phist)][0]
        period.append(np.exp(ln_p))
        # Asymmetric uncertainties from the central 68% interval.
        lnerrp.append(np.percentile(df.ln_period.values, 84) - ln_p)
        lnerrm.append(ln_p - np.percentile(df.ln_period.values, 16))
        errp.append(np.exp(lnerrp[i]/ln_p))
        errm.append(np.exp(lnerrm[i]/ln_p))
        # The KOI number is the first run of digits in the file name.
        koi.append(re.findall(r'\d+', fn)[0])
    table = pd.DataFrame({"koi": np.array(koi), "period": np.array(period),
                          "errp": np.array(errp), "errm": np.array(errm)})
    table.to_csv("period_point_estimates.csv")
def make_histogram():
    """Plot a 20-bin histogram of the saved rotation-period point estimates."""
    df = pd.read_csv("period_point_estimates.csv")
    plt.clf()
    plt.hist(df.period, 20)
    plt.savefig("gp_period_hist")
def make_df():
    """Merge the period point estimates with the KOI cumulative table.

    Joins on the integer KOI number and writes planet_periods.csv.
    """
    df = pd.read_csv("period_point_estimates.csv")
    planets = pd.read_csv(os.path.join(DATA_DIR, "cumulative.csv"),
                          skiprows=155)
    # Extract the integer KOI number from names like "K00042.01".
    planets["koi"] = [int(re.findall('\d+', name)[0])
                      for name in planets.kepoi_name.values]
    joint = pd.merge(planets, df, on="koi")
    joint.to_csv("planet_periods.csv")
def plot_periods():
    """Scatter-plot rotation period vs orbital period for the merged sample.

    Marker size tracks planet radius, color tracks stellar Teff.  Also
    prints the KOIs of the fastest rotators for follow-up.
    """
    df = pd.read_csv("planet_periods.csv")
    # Keep rotators with ln(period) > 1 -- presumably to drop implausibly
    # short periods; TODO confirm the cut.
    m = np.log(df.period.values) > 1
    # NOTE(review): lnporb/lnprot are computed but never used below.
    lnporb = np.log(df.koi_period.values[m])
    lnprot = np.log(df.period.values[m])
    porb = df.koi_period.values[m]
    prot = df.period.values[m]
    radius = np.log(df.koi_prad.values[m])
    teff = df.koi_steff.values[m]
    plt.clf()
    plt.scatter(porb, prot, s=5*radius, c=teff, vmin=4400, vmax=7000)
    plt.loglog()
    plt.colorbar()
    plt.xlabel("$\ln(\mathrm{Orbital~period})$")
    plt.ylabel("$\ln(\mathrm{Rotation~period})$")
    plt.subplots_adjust(bottom=.15)
    plt.savefig("period_period")
    # find the short rotators
    m = np.log(df.period.values) < 1
    print(df.koi.values[m])
    # import kplr
    # client = kplr.API()
    # for i, k in enumerate(df.koi.values[m]):
    #     print(k)
    #     star = client.koi("{}.01".format(k))
    #     star.get_light_curves(fetch=True)
def plot_radii():
    """Plot gyrochronology age vs planet radius and compare the radius
    distributions of planets around young vs old hosts (histograms, CDFs
    and two-sample KS tests), with a second pass restricted to small radii.
    """
    df = pd.read_csv("planet_periods.csv")
    m = np.log(df.period.values) > 1
    prot = df.period.values[m]
    radius = np.log(df.koi_prad.values[m])
    teff = df.koi_steff.values[m]
    logg = df.koi_slogg.values[m]
    # Assumes solar metallicity for every host -- TODO confirm.
    feh = np.zeros(len(logg))
    gyro = gyro_age(prot, teff, feh, logg)
    age = gyro.barnes07("mh")
    # NOTE: `m` is deliberately rebound here -- from now on it is a
    # finite-age mask over the already period-masked arrays above.
    m = np.isfinite(age)
    plt.clf()
    plt.scatter(np.log(age[m]), np.log(radius[m]), c=teff[m], s=10, vmin=4400,
                vmax=7000)
    plt.colorbar()
    plt.xlabel("$\ln(\mathrm{Age,~Gyr})$")
    plt.ylabel("$\ln(\mathrm{Radius}, R_J)$")
    plt.subplots_adjust(bottom=.15)
    plt.savefig("age_radius")
    # Split at 3.295 Gyr -- presumably the sample median age; verify.
    l = age[m] < 3.295
    print(len(radius[m][l]))
    print(len(radius[m][~l]))
    plt.clf()
    plt.hist(radius[m][l], 50, normed=True, alpha=.5, label="young")
    plt.hist(radius[m][~l], 40, normed=True, alpha=.5, label="old")
    plt.legend()
    plt.xlabel("Radius")
    plt.savefig("radius_hist")
    print(sps.ks_2samp(radius[m][l], radius[m][~l]))
    cum_young = np.cumsum(radius[m][l]) / sum(radius[m][l])
    cum_old = np.cumsum(radius[m][~l]) / sum(radius[m][~l])
    plt.clf()
    plt.plot(cum_young, label="young")
    plt.plot(cum_old, label="old")
    plt.savefig("radius_cdf")
    # # print(np.unique(df.kepid.values[m]))
    # for i in np.unique(df.kepid.values[m]):
    #     print("KIC", str(int(i)).zfill(9))
    # Repeat the comparison for small planets only (ln-radius < .5).
    n = radius[m][l] < .5
    n2 = radius[m][~l] < .5
    print(len(radius[m][l][n]))
    print(len(radius[m][~l][n2]))
    plt.clf()
    plt.hist(radius[m][l][n], 50, normed=True, alpha=.5, label="young")
    plt.hist(radius[m][~l][n2], 40, normed=True, alpha=.5, label="old")
    plt.legend()
    plt.xlabel("Radius")
    plt.savefig("radius_hist_hj")
    print(sps.ks_2samp(radius[m][l][n], radius[m][~l][n2]))
    n = radius[m] < .5
    plt.clf()
    plt.scatter(np.log(age[m][n]), np.log(radius[m][n]), c=teff[m][n], s=10,
                vmin=4400, vmax=7000)
    plt.colorbar()
    plt.xlabel("$\ln(\mathrm{Age,~Gyr})$")
    plt.ylabel("$\ln(\mathrm{Radius}, R_J)$")
    plt.subplots_adjust(bottom=.15)
    plt.savefig("age_radius_hj")
def plot_completeness():
    """Compute detection completeness per target and plot it vs period.

    NOTE(review): only the first 10 kepid values are processed -- this
    looks like a debugging shortcut, so df["probtot"] stays zero for every
    other row.  Confirm before relying on the column.
    """
    df = pd.read_csv("planet_periods.csv")
    comp = np.zeros((len(df.kepid.values)))
    print(df.kepid.values[:10])
    for i, kepid in enumerate(df.kepid.values[:10]):
        print(i, "of", len(df.kepid.values))
        print("id = ", kepid)
        # Presumably completeness at period 365.25 d and radius 1 --
        # confirm against calc_comp's signature.
        comp[i] = calc_comp(kepid, 365.25, 1.)
        print(comp[i])
    df["probtot"] = comp
    plt.clf()
    plt.plot(comp[:10], df.period.values[:10], "k.")
    plt.savefig("comp_vs_period")
if __name__ == "__main__":
    # Pipeline stages; earlier stages are commented out once their CSV /
    # figure outputs exist on disk.
    # save_data(100)
    # make_histogram()
    # make_df()
    # plot_periods()
    # plot_radii()
    plot_completeness()
|
#!/usr/bin/env python
# This file is part of Androguard.
#
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# Androguard is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androguard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
import sys
from optparse import OptionParser
from androguard.core.bytecodes import apk, dvm
from androguard.core.analysis import analysis
from androguard.core import androconf
sys.path.append("./elsim")
from elsim import elsim
from elsim.elsim_dalvik import ProxyDalvik, FILTERS_DALVIK_SIM, ProxyDalvikMethod, FILTERS_DALVIK_BB
from elsim.elsim_dalvik import ProxyDalvikBasicBlock, FILTERS_DALVIK_DIFF_BB
from elsim.elsim_dalvik import DiffDalvikMethod
# Command-line option declarations, consumed by the OptionParser loop under
# __main__ below: each dict's 'name' key holds the option strings and the
# remaining keys are passed straight to parser.add_option().
option_0 = { 'name' : ('-i', '--input'), 'help' : 'file : use these filenames', 'nargs' : 2 }
option_1 = { 'name' : ('-t', '--threshold'), 'help' : 'define the threshold', 'nargs' : 1 }
option_2 = { 'name' : ('-c', '--compressor'), 'help' : 'define the compressor', 'nargs' : 1 }
option_3 = { 'name' : ('-d', '--display'), 'help' : 'display the file in human readable format', 'action' : 'count' }
#option_4 = { 'name' : ('-e', '--exclude'), 'help' : 'exclude specific blocks (0 : orig, 1 : diff, 2 : new)', 'nargs' : 1 }
option_5 = { 'name' : ('-e', '--exclude'), 'help' : 'exclude specific class name (python regexp)', 'nargs' : 1 }
option_6 = { 'name' : ('-s', '--size'), 'help' : 'exclude specific method below the specific size', 'nargs' : 1 }
option_7 = { 'name' : ('-v', '--version'), 'help' : 'version of the API', 'action' : 'count' }
# option_4 above is retired; its '-e' short flag was reused by option_5.
options = [option_0, option_1, option_2, option_3, option_5, option_6, option_7]
def main(options, arguments) :
    """Diff two Android APK/DEX files (Python 2 code).

    Loads both inputs, runs elsim similarity at method level, then shows
    per-method basic-block diffs plus the lists of new and deleted methods.
    """
    details = False
    if options.display != None :
        # NOTE(review): `details` is set but not read in this function --
        # presumably consumed elsewhere in the original file; confirm.
        details = True
    if options.input != None :
        # Load the first input as either an APK (extract its DEX) or raw DEX.
        ret_type = androconf.is_android( options.input[0] )
        if ret_type == "APK" :
            a = apk.APK( options.input[0] )
            d1 = dvm.DalvikVMFormat( a.get_dex() )
        elif ret_type == "DEX" :
            d1 = dvm.DalvikVMFormat( open(options.input[0], "rb").read() )
        dx1 = analysis.VMAnalysis( d1 )
        # Same for the second input.
        ret_type = androconf.is_android( options.input[1] )
        if ret_type == "APK" :
            a = apk.APK( options.input[1] )
            d2 = dvm.DalvikVMFormat( a.get_dex() )
        elif ret_type == "DEX" :
            d2 = dvm.DalvikVMFormat( open(options.input[1], "rb").read() )
        dx2 = analysis.VMAnalysis( d2 )
        print d1, dx1, d2, dx2
        sys.stdout.flush()
        threshold = None
        if options.threshold != None :
            threshold = float(options.threshold)
        # Configure the method-level similarity filters from CLI options.
        FS = FILTERS_DALVIK_SIM
        FS[elsim.FILTER_SKIPPED_METH].set_regexp( options.exclude )
        FS[elsim.FILTER_SKIPPED_METH].set_size( options.size )
        el = elsim.Elsim( ProxyDalvik(d1, dx1), ProxyDalvik(d2, dx2), FS, threshold, options.compressor )
        el.show()
        # For each pair of similar methods, diff their basic blocks.
        e1 = elsim.split_elements( el, el.get_similar_elements() )
        for i in e1 :
            j = e1[ i ]
            elb = elsim.Elsim( ProxyDalvikMethod(i), ProxyDalvikMethod(j), FILTERS_DALVIK_BB, threshold, options.compressor )
            #elb.show()
            eld = elsim.Eldiff( ProxyDalvikBasicBlock(elb), FILTERS_DALVIK_DIFF_BB )
            #eld.show()
            ddm = DiffDalvikMethod( i, j, elb, eld )
            ddm.show()
        print "NEW METHODS"
        enew = el.get_new_elements()
        for i in enew :
            el.show_element( i, False )
        print "DELETED METHODS"
        edel = el.get_deleted_elements()
        for i in edel :
            el.show_element( i )
    elif options.version != None :
        print "Androdiff version %s" % androconf.ANDROGUARD_VERSION
if __name__ == "__main__" :
    parser = OptionParser()
    for option in options :
        # Each option dict is consumed destructively: 'name' holds the
        # option strings, the remaining keys are add_option() kwargs.
        param = option['name']
        del option['name']
        parser.add_option(*param, **option)
    options, arguments = parser.parse_args()
    sys.argv[:] = arguments
    main(options, arguments)
|
using CP77.CR2W.Reflection;
namespace CP77.CR2W.Types
{
    /// <summary>
    /// TweakDB record wrapper for the gamedataParentAttachmentType record
    /// type; all behavior is inherited from <c>gamedataTweakDBRecord</c>.
    /// </summary>
    [REDMeta]
    public class gamedataParentAttachmentType_Record : gamedataTweakDBRecord
    {
        public gamedataParentAttachmentType_Record(CR2WFile cr2w, CVariable parent, string name) : base(cr2w, parent, name) { }
    }
}
|
namespace NES.Tests.Stubs
{
    /// <summary>
    /// Marker interface used by the test suite to stand in for domain
    /// events; intentionally empty.
    /// </summary>
    public interface IEvent
    {
    }
}
|
<?php

$params = require(__DIR__ . '/params.php');

// Yii2 application configuration.  Consistency fix: the urlManager 'rules'
// entry used the legacy array() syntax while the rest of the file uses the
// short [] form; normalized to [] (no behavioral change).
$config = [
    'id' => 'basic',
    // NOTE(review): 'sourceLanguage' below is 'en-EN', an unusual locale
    // tag (typically 'en-US' or 'en') -- left untouched, confirm intent.
    'language' => 'ru-RU',
    'basePath' => dirname(__DIR__),
    'bootstrap' => ['log', 'languageSwitcher'],
    'defaultRoute' => 'book/index',
    'components' => [
        'assetManager' => [
            'bundles' => false,
        ],
        'request' => [
            'cookieValidationKey' => '1hezUTdz9HiMT7vnsNkKA8oe44laJq2l',
        ],
        'cache' => [
            'class' => 'yii\caching\FileCache',
        ],
        'user' => [
            'identityClass' => 'app\models\User',
            'enableAutoLogin' => true,
        ],
        'errorHandler' => [
            'errorAction' => 'site/error',
        ],
        'mailer' => [
            'class' => 'yii\swiftmailer\Mailer',
            // send all mails to a file by default. You have to set
            // 'useFileTransport' to false and configure a transport
            // for the mailer to send real emails.
            'useFileTransport' => true,
        ],
        'log' => [
            'traceLevel' => YII_DEBUG ? 3 : 0,
            'targets' => [
                [
                    'class' => 'yii\log\FileTarget',
                    'levels' => ['error', 'warning'],
                ],
            ],
        ],
        'db' => require(__DIR__ . '/db.php'),
        'urlManager' => [
            'class' => 'yii\web\UrlManager',
            // Disable index.php
            'showScriptName' => false,
            // Disable r= routes
            'enablePrettyUrl' => true,
            'rules' => [
                '<controller:\w+>/<id:\d+>' => '<controller>/view',
                '<controller:\w+>/<action:\w+>/<id:\d+>' => '<controller>/<action>',
                '<controller:\w+>/<action:\w+>' => '<controller>/<action>',
            ],
        ],
        'i18n' => [
            'translations' => [
                'app*' => [
                    'class' => 'yii\i18n\PhpMessageSource',
                    'basePath' => '@app/messages',
                    'sourceLanguage' => 'en-EN',
                    'fileMap' => [
                        'app' => 'app.php',
                        'app/error' => 'error.php',
                    ],
                    'on missingTranslation' =>
                        ['app\components\TranslationEventHandler',
                         'handleMissingTranslation']
                ],
            ],
        ],
        'languageSwitcher' => [
            'class' => 'app\components\LanguageSwitcher',
        ],
    ],
    'params' => $params,
];

if (YII_ENV_DEV) {
    // configuration adjustments for 'dev' environment
    $config['bootstrap'][] = 'debug';
    $config['modules']['debug'] = [
        'class' => 'yii\debug\Module',
    ];
    $config['bootstrap'][] = 'gii';
    $config['modules']['gii'] = [
        'class' => 'yii\gii\Module',
    ];
}

return $config;
|
var helpers = require('../../lib/sorting').helpers;
var assert = require('assert');

// Specs for the low-level helpers shared by the sorting algorithms.
describe('Helper functions for sorting', function() {
  describe('exch', function() {
    it('mutates contents of the original array', function() {
      var letters = ['a', 'b', 'c'];
      helpers.exch(letters, 0, 2);
      helpers.exch(letters, 1, 2);
      // After two swaps the array cannot still be in its original order.
      assert.notDeepEqual(letters, ['a', 'b', 'c']);
    });

    it('swaps positions of two array elements', function() {
      var letters = ['a', 'b', 'c'];
      helpers.exch(letters, 0, 1);
      assert.equal(letters[0], 'b');
      assert.equal(letters[1], 'a');
      assert.equal(letters[2], 'c');
    });
  });
});
|
package com.troy.xifan.fragment;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.design.widget.TabLayout;
import android.support.v4.app.Fragment;
import android.support.v4.view.ViewPager;
import android.support.v7.widget.Toolbar;
import android.view.View;
import butterknife.BindView;
import com.chenenyu.router.Router;
import com.troy.xifan.R;
import com.troy.xifan.adapter.SearchPagerAdapter;
import com.troy.xifan.config.Constants;
import java.util.ArrayList;
import java.util.List;
/**
* Created by chenlongfei on 2016/12/1.
*/
/**
 * Fragment hosting the search entry point: a toolbar that routes to the
 * dedicated search screen and a tab pager with the trend and
 * public-timeline pages.
 */
public class SearchFragment extends BaseFragment {
    @BindView(R.id.toolbar) Toolbar mToolbar;
    @BindView(R.id.tab_layout) TabLayout mTabLayout;
    @BindView(R.id.view_pager) ViewPager mViewPager;

    // Pages shown in the pager; populated once in onCreate().
    private List<Fragment> mFragments = new ArrayList<>();
    private String[] mTabTitles;

    /** Factory method used by the hosting activity. */
    public static Fragment newInstance() {
        return new SearchFragment();
    }

    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Build the page fragments before the view hierarchy exists.
        initFragments();
    }

    @Override
    public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
    }

    private void initFragments() {
        mFragments.add(TrendFragment.newInstance());
        mFragments.add(PublicTimelineFragment.newInstance());
    }

    @Override
    protected void initViews() {
        mToolbar.setTitle(getString(R.string.title_search));
        mToolbar.setNavigationIcon(R.mipmap.ic_action_search);
        mTabTitles = new String[] {
                getString(R.string.title_tab_topic), getString(R.string.title_tab_public_timeline)
        };
        // Keep both pages alive so switching tabs does not recreate them.
        mViewPager.setOffscreenPageLimit(mTabTitles.length);
        mViewPager.setAdapter(
                new SearchPagerAdapter(getChildFragmentManager(), mFragments, mTabTitles));
        mTabLayout.setupWithViewPager(mViewPager);
        initListeners();
    }

    private void initListeners() {
        // Tapping the toolbar opens the dedicated search activity.
        mToolbar.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                Router.build(Constants.Router.SEARCH).go(getActivity());
            }
        });
    }

    @Override
    protected int getContentViewResId() {
        return R.layout.fragment_search;
    }

    @Override
    public void onRefresh() {
        // No pull-to-refresh behavior on this screen.
    }
}
|
#!/usr/bin/env python2.5
#
# Copyright 2010 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""OrgAppRecord (Model) query functions for the GHOP module.
"""
# Module authors (Melange project convention).
__authors__ = [
  '"Lennard de Rijk" <[email protected]>',
  ]

from soc.logic.models import org_app_record
from soc.models.org_app_record import OrgAppRecord as \
    org_app_model
from soc.models.survey_record import SurveyRecord

# Mail templates used when notifying organizations of the outcome of
# their GHOP org application.
DEF_ACCEPTED_TEMPLATE = 'soc/org_app_survey/mail/accepted.html'
DEF_REJECTED_TEMPLATE = 'soc/org_app_survey/mail/rejected.html'
class Logic(org_app_record.Logic):
  """Logic class for OrgAppRecord in the GHOP module."""

  def __init__(self, model=org_app_model,
               base_model=SurveyRecord, scope_logic=None, module_name='ghop',
               mail_templates=None):
    """Defines the name, key_name and model for this entity.

    mail_templates defaults to the module-level accepted/rejected GHOP
    templates.  A fresh dict is built per call rather than using a dict
    literal as the default, avoiding the shared-mutable-default pitfall;
    callers passing an explicit dict are unaffected.
    """
    if mail_templates is None:
      mail_templates = {'accepted': DEF_ACCEPTED_TEMPLATE,
                        'rejected': DEF_REJECTED_TEMPLATE}
    super(Logic, self).__init__(
        model=model, base_model=base_model, scope_logic=scope_logic,
        module_name=module_name, mail_templates=mail_templates)


# Singleton instance used by the rest of the GHOP module.
logic = Logic()
|
#
# Copyright (c) 2004 Conectiva, Inc.
#
# Written by Gustavo Niemeyer <[email protected]>
#
# This file is part of Smart Package Manager.
#
# Smart Package Manager is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# Smart Package Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Smart Package Manager; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
class PkgConfig(object):
    """Wrapper around the smart configuration store for per-package state:
    flags, channel priorities and origins.

    All data lives in the wrapped config object under the "package-flags",
    "package-priorities" and "package-origins" sections.
    """

    def __init__(self, config):
        self._config = config

    def getFlagNames(self):
        """Return the names of all defined package flags."""
        return self._config.keys("package-flags", ())

    def getFlagTargets(self, flag):
        """Return the {name: [(relation, version), ...]} targets of *flag*."""
        return self._config.get(("package-flags", flag), {})

    def createFlag(self, flag):
        """Create *flag* with an empty target mapping."""
        return self._config.set(("package-flags", flag), {})

    def flagExists(self, flag):
        return self._config.has(("package-flags", flag))

    def getFlag(self, flag):
        return self._config.get(("package-flags", flag))

    def renameFlag(self, oldname, newname):
        """Move the targets of *oldname* under *newname*."""
        config = self._config
        config.set(("package-flags", newname),
                   config.get(("package-flags", oldname)))
        config.remove(("package-flags", oldname))

    def setFlag(self, flag, name, relation=None, version=None):
        """Add a (relation, version) target for package *name* under *flag*."""
        self._config.add(("package-flags", flag, name),
                         (relation, version), unique=True)

    def clearFlag(self, flag, name=None, relation=(), version=()):
        """Remove flag targets.

        With no *name* the whole flag is removed; with a *name* and the
        default () sentinels every target for that package is removed,
        otherwise only the exact (relation, version) pair.
        """
        if name:
            # Bug fix: the original used `relation is ()` / `version is ()`,
            # which relies on CPython interning the empty tuple and raises
            # a SyntaxWarning on modern interpreters.  `== ()` is the
            # equivalent, portable test.
            if relation == () or version == ():
                return self._config.remove(("package-flags", flag, name))
            else:
                return self._config.remove(("package-flags", flag, name),
                                           (relation, version))
        else:
            return self._config.remove(("package-flags", flag))

    def testFlag(self, flag, pkg):
        """Return True when *pkg* matches any target of *flag*."""
        for item in self._config.get(("package-flags", flag, pkg.name), ()):
            if pkg.matches(*item):
                return True
        return False

    def filterByFlag(self, flag, pkgs):
        """Return the subset of *pkgs* that match *flag*."""
        fpkgs = []
        names = self._config.get(("package-flags", flag))
        if names:
            for pkg in pkgs:
                lst = names.get(pkg.name)
                if lst:
                    for item in lst:
                        if pkg.matches(*item):
                            fpkgs.append(pkg)
                            break
        return fpkgs

    def testAllFlags(self, pkg):
        """Return every flag name that *pkg* matches."""
        result = []
        for flag in self._config.keys("package-flags", ()):
            if self.testFlag(flag, pkg):
                result.append(flag)
        return result

    def getPriority(self, pkg):
        """Return the priority for *pkg*.

        The highest per-channel priority among the package's loaders wins;
        the None channel key acts as the package-wide fallback.
        """
        priority = None
        priorities = self._config.get(("package-priorities", pkg.name))
        if priorities:
            for loader in pkg.loaders:
                inchannel = priorities.get(loader.getChannel().getAlias())
                # Bug fix: the original condition lacked parentheses
                # (`a and b or c`), so a None `inchannel` could fall
                # through to the `>` comparison, which raises TypeError
                # on Python 3.  This grouping matches the Python 2
                # behavior (None never wins a comparison).
                if inchannel is not None and (priority is None or
                                              inchannel > priority):
                    priority = inchannel
            if priority is None:
                priority = priorities.get(None)
        return priority

    def setPriority(self, name, channelalias, priority):
        self._config.set(("package-priorities", name, channelalias), priority)

    def removePriority(self, name, channelalias):
        return self._config.remove(("package-priorities", name, channelalias))

    def getOrigin(self, pkg):
        return self._config.get(("package-origins", pkg.name, pkg.version))

    def setOrigin(self, pkg, channelalias):
        self._config.set(("package-origins", pkg.name, pkg.version), channelalias)

    def removeOrigin(self, pkg):
        return self._config.remove(("package-origins", pkg.name, pkg.version))
|
#-----------------------------------------------------------------------------
#
# Copyright (c) 2005-2006 by Enthought, Inc.
# All rights reserved.
#
# Author: David C. Morrill <[email protected]>
#
#-----------------------------------------------------------------------------
""" An action that creates a new (and empty) user perspective. """
# Local imports.
from user_perspective_name import UserPerspectiveName
from workbench_action import WorkbenchAction
class NewUserPerspectiveAction(WorkbenchAction):
    """ An action that creates a new (and empty) user perspective. """

    #### 'Action' interface ###################################################

    # The action's unique identifier.
    id = 'enthought.pyface.workbench.action.new_user_perspective_action'

    # The action's name.
    name = 'New Perspective...'

    ###########################################################################
    # 'Action' interface.
    ###########################################################################

    def perform(self, event):
        """ Perform the action: prompt for a name, then create and activate
        a new empty user perspective in the event's window. """

        window  = event.window
        manager = window.workbench.user_perspective_manager

        # Get the details of the new perspective.
        upn = UserPerspectiveName(name='User Perspective %d' % manager.next_id)
        if upn.edit_traits(view='new_view').result:
            # Create a new (and empty) user perspective.
            perspective = manager.create_perspective(
                upn.name.strip(), upn.show_editor_area
            )

            # Add it to the window...
            window.perspectives.append(perspective)

            # ... and make it the active perspective.
            window.active_perspective = perspective

        return
#### EOF #####################################################################
|
from django.views.generic import ListView, DetailView
from django.views.generic.edit import CreateView, UpdateView
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required, permission_required
from gestioneide.models import *
@method_decorator(permission_required('gestioneide.aula_view',raise_exception=True),name='dispatch')
class ListaAulas(ListView):
    """List of all Aula objects (requires the aula_view permission)."""
    model = Aula
    template_name="aulas.html"
    context_object_name = "aulas_list"
@method_decorator(permission_required('gestioneide.aula_create',raise_exception=True),name='dispatch')
class NuevaAula(CreateView):
    """Creation form for an Aula (requires the aula_create permission)."""
    model = Aula
    template_name="aula_nueva.html"
    fields="__all__"
    success_url = '/aulas/' ## FIXME esto deberia ser un reverse
@method_decorator(permission_required('gestioneide.aula_change',raise_exception=True),name='dispatch')
class EditarAula(UpdateView):
    """Edit form for an Aula (requires the aula_change permission)."""
    model = Aula
    template_name="aula_editar.html"
    fields = '__all__'
    success_url = "/aulas/"  # FIXME: should use reverse(), like NuevaAula
@method_decorator(permission_required('gestioneide.aula_view',raise_exception=True),name='dispatch')
class DetalleAula(DetailView):
    """Detail page for a single Aula (requires the aula_view permission)."""
    model = Aula
    context_object_name ="aula"
    template_name="aula_detalle.html"
##FIXME esto va sin permisos. deberia tener?
def get_clases_dia(fecha, aula=None, profesor=None):
    """Return all Clase objects taking place on the given day.

    The day window is [fecha, fecha + 1 day) on hora_inicio/hora_fin, and
    the result may optionally be restricted to one aula or one profesor.
    """
    # Build the common date-window filter once instead of repeating the
    # whole queryset in every branch -- the three original branches only
    # differed by the aula/profesor constraint.
    filtros = {
        'hora_inicio__gte': fecha,
        'hora_fin__lte': fecha + datetime.timedelta(days=1),
    }
    if aula:
        filtros['aula'] = aula
    elif profesor:
        filtros['profesor'] = profesor
    return Clase.objects.filter(**filtros)
def programacion_aula_dia(aula, dia):
    """Return the 15-minute schedule grid (08:00-22:00) for *aula* on *dia*.

    Each entry is a ["HH:MM", description] pair; free slots get a dotted
    placeholder.
    """
    clases = []
    for hora in range(8, 22):
        for cuarto in range(0, 60, 15):
            # Probe the class table at the start of this quarter hour.
            consulta = datetime.datetime(dia.year, dia.month, dia.day,
                                         hora, cuarto, 0)
            encontradas = Clase.objects.filter(
                aula=aula, hora_inicio__lte=consulta, hora_fin__gte=consulta)
            etiqueta = "%s:%s" % (hora, cuarto)
            if encontradas.count() == 1:
                clase = encontradas[0]
                clases.append([etiqueta,
                               "%s-%s" % (clase.nombre, clase.aula.nombre)])
            else:
                clases.append([etiqueta, "................"])
    return clases
|
<div>
<h1>Terms of Use</h1>
<div>
Last updated: <time>11/13/2018</time>
</div>
</div>
|
# coding: utf-8
from __future__ import unicode_literals, division
import hashlib
import hmac
import re
import time
from .common import InfoExtractor
from ..compat import compat_HTTPError
from ..utils import (
determine_ext,
float_or_none,
int_or_none,
parse_age_limit,
parse_duration,
url_or_none,
ExtractorError
)
class CrackleIE(InfoExtractor):
    # Extractor for (Sony) Crackle videos.  Tries a list of countries until
    # the geo-restricted media API accepts the request.
    _VALID_URL = r'(?:crackle:|https?://(?:(?:www|m)\.)?(?:sony)?crackle\.com/(?:playlist/\d+/|(?:[^/]+/)+))(?P<id>\d+)'
    _TESTS = [{
        # geo restricted to CA
        'url': 'https://www.crackle.com/andromeda/2502343',
        'info_dict': {
            'id': '2502343',
            'ext': 'mp4',
            'title': 'Under The Night',
            'description': 'md5:d2b8ca816579ae8a7bf28bfff8cefc8a',
            'duration': 2583,
            'view_count': int,
            'average_rating': 0,
            'age_limit': 14,
            'genre': 'Action, Sci-Fi',
            'creator': 'Allan Kroeker',
            'artist': 'Keith Hamilton Cobb, Kevin Sorbo, Lisa Ryder, Lexa Doig, Robert Hewitt Wolfe',
            'release_year': 2000,
            'series': 'Andromeda',
            'episode': 'Under The Night',
            'season_number': 1,
            'episode_number': 1,
        },
        'params': {
            # m3u8 download
            'skip_download': True,
        }
    }, {
        'url': 'https://www.sonycrackle.com/andromeda/2502343',
        'only_matching': True,
    }]

    # Progressive MP4 variants keyed by the API's 'Type' field.
    _MEDIA_FILE_SLOTS = {
        '360p.mp4': {
            'width': 640,
            'height': 360,
        },
        '480p.mp4': {
            'width': 768,
            'height': 432,
        },
        '480p_1mbps.mp4': {
            'width': 852,
            'height': 480,
        },
    }

    def _real_extract(self, url):
        video_id = self._match_id(url)

        country_code = self._downloader.params.get('geo_bypass_country', None)
        countries = [country_code] if country_code else (
            'US', 'AU', 'CA', 'AS', 'FM', 'GU', 'MP', 'PR', 'PW', 'MH', 'VI')

        last_e = None

        for country in countries:
            try:
                # Authorization generation algorithm is reverse engineered from:
                # https://www.sonycrackle.com/static/js/main.ea93451f.chunk.js
                media_detail_url = 'https://web-api-us.crackle.com/Service.svc/details/media/%s/%s?disableProtocols=true' % (
                    video_id, country)
                timestamp = time.strftime('%Y%m%d%H%M', time.gmtime())
                h = hmac.new(b'IGSLUQCBDFHEOIFM', '|'.join([media_detail_url, timestamp]).encode(),
                             hashlib.sha1).hexdigest().upper()
                media = self._download_json(
                    media_detail_url, video_id, 'Downloading media JSON as %s' % country,
                    'Unable to download media JSON', headers={
                        'Accept': 'application/json',
                        'Authorization': '|'.join([h, timestamp, '117', '1']),
                    })
            except ExtractorError as e:
                # 401 means geo restriction, trying next country
                if isinstance(e.cause, compat_HTTPError) and e.cause.code == 401:
                    last_e = e
                    continue
                raise

            media_urls = media.get('MediaURLs')
            if not media_urls or not isinstance(media_urls, list):
                continue

            title = media['Title']

            formats = []
            for e in media['MediaURLs']:
                if e.get('UseDRM') is True:
                    continue
                format_url = url_or_none(e.get('Path'))
                if not format_url:
                    continue
                ext = determine_ext(format_url)
                if ext == 'm3u8':
                    formats.extend(self._extract_m3u8_formats(
                        format_url, video_id, 'mp4', entry_protocol='m3u8_native',
                        m3u8_id='hls', fatal=False))
                elif ext == 'mpd':
                    formats.extend(self._extract_mpd_formats(
                        format_url, video_id, mpd_id='dash', fatal=False))
                elif format_url.endswith('.ism/Manifest'):
                    formats.extend(self._extract_ism_formats(
                        format_url, video_id, ism_id='mss', fatal=False))
                else:
                    mfs_path = e.get('Type')
                    mfs_info = self._MEDIA_FILE_SLOTS.get(mfs_path)
                    if not mfs_info:
                        continue
                    formats.append({
                        'url': format_url,
                        'format_id': 'http-' + mfs_path.split('.')[0],
                        'width': mfs_info['width'],
                        'height': mfs_info['height'],
                    })
            self._sort_formats(formats)

            description = media.get('Description')
            duration = int_or_none(media.get(
                'DurationInSeconds')) or parse_duration(media.get('Duration'))
            view_count = int_or_none(media.get('CountViews'))
            average_rating = float_or_none(media.get('UserRating'))
            age_limit = parse_age_limit(media.get('Rating'))
            genre = media.get('Genre')
            release_year = int_or_none(media.get('ReleaseYear'))
            creator = media.get('Directors')
            artist = media.get('Cast')

            if media.get('MediaTypeDisplayValue') == 'Full Episode':
                series = media.get('ShowName')
                episode = title
                season_number = int_or_none(media.get('Season'))
                episode_number = int_or_none(media.get('Episode'))
            else:
                series = episode = season_number = episode_number = None

            subtitles = {}
            cc_files = media.get('ClosedCaptionFiles')
            if isinstance(cc_files, list):
                for cc_file in cc_files:
                    if not isinstance(cc_file, dict):
                        continue
                    cc_url = url_or_none(cc_file.get('Path'))
                    if not cc_url:
                        continue
                    lang = cc_file.get('Locale') or 'en'
                    subtitles.setdefault(lang, []).append({'url': cc_url})

            thumbnails = []
            images = media.get('Images')
            # Bug fix: the original guarded with isinstance(images, list) and
            # then called images.items(), so a list crashed with
            # AttributeError and a dict was skipped entirely -- thumbnails
            # were never extracted.  The Img_WxH keys indicate a mapping of
            # size-tagged keys to URLs, so check for dict (confirm against a
            # live API response).
            if isinstance(images, dict):
                for image_key, image_url in images.items():
                    mobj = re.search(r'Img_(\d+)[xX](\d+)', image_key)
                    if not mobj:
                        continue
                    thumbnails.append({
                        'url': image_url,
                        'width': int(mobj.group(1)),
                        'height': int(mobj.group(2)),
                    })

            return {
                'id': video_id,
                'title': title,
                'description': description,
                'duration': duration,
                'view_count': view_count,
                'average_rating': average_rating,
                'age_limit': age_limit,
                'genre': genre,
                'creator': creator,
                'artist': artist,
                'release_year': release_year,
                'series': series,
                'episode': episode,
                'season_number': season_number,
                'episode_number': episode_number,
                'thumbnails': thumbnails,
                'subtitles': subtitles,
                'formats': formats,
            }

        raise last_e
|
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>QUnit DateUtil</title>
</head>
<body>
<div id="qunit"></div>
<div id="qunit-fixture"></div>
<script src="../../loader/loader.js"></script>
<script src="test.js"></script>
</body>
</html>
|
import { AsyncIterableX } from '../asynciterable';
import { bindCallback } from '../internal/bindcallback';
/**
 * Async-iterable that lazily applies a (possibly async) selector to each
 * element of the wrapped source, passing along the element's zero-based
 * index.
 */
class MapAsyncIterable<TSource, TResult> extends AsyncIterableX<TResult> {
  private _source: Iterable<TSource | Promise<TSource>> | AsyncIterable<TSource>;
  private _selector: (value: TSource, index: number) => TResult | Promise<TResult>;

  constructor(
      source: Iterable<TSource | Promise<TSource>> | AsyncIterable<TSource>,
      selector: (value: TSource, index: number) => TResult | Promise<TResult>) {
    super();
    this._source = source;
    this._selector = selector;
  }

  async *[Symbol.asyncIterator]() {
    let i = 0;
    // `for await` also consumes sync iterables of promises, hence the cast.
    for await (let item of <AsyncIterable<TSource>>(this._source)) {
      yield await this._selector(item, i++);
    }
  }
}
/**
 * Projects each element of a (sync or async) iterable into a new form.
 *
 * @param source   Source sequence; elements of a sync source may be promises.
 * @param selector Transform receiving the element and its zero-based index.
 * @param thisArg  Optional `this` binding for the selector.
 */
export function mapAsync<TSource, TResult>(
    source: Iterable<TSource | Promise<TSource>> | AsyncIterable<TSource>,
    selector: (value: TSource, index: number) => TResult | Promise<TResult>,
    thisArg?: any): AsyncIterableX<TResult> {
  return new MapAsyncIterable<TSource, TResult>(source, bindCallback(selector, thisArg, 2));
}
|
<div class='overlay' ng-show='show()'></div>
|
#!/usr/bin/env python
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Routines for configuring OpenStack Service
"""
import gettext
import logging
import logging.config
import logging.handlers
import sys
import os
from keystone import cfg
gettext.install("keystone", unicode=1)
class Config(cfg.CommonConfigOpts):
    """Keystone configuration object layered on openstack-common cfg.

    Adds dict-style access and the ability to override the config-file
    search list before parsing.
    """

    def __call__(self, config_files=None):
        # Override the default config-file list before the base class
        # parses the command line / config files.
        if config_files is not None:
            self._opts["config_file"]["opt"].default = config_files
        return super(Config, self).__call__()

    def __getitem__(self, key, default=None):
        # NOTE(review): the extra `default` is only reachable via a direct
        # __getitem__() call, never through `conf[key]` -- presumably for
        # internal callers; confirm before removing.
        return getattr(self, key, default)

    def __setitem__(self, key, value):
        return setattr(self, key, value)

    def iteritems(self):
        # Python 2 style iterator over (option name, resolved value) pairs.
        for key in self._opts:
            yield (key, getattr(self, key))

    def to_dict(self):
        """ Returns a representation of the CONF settings as a dict."""
        ret = {}
        for key, val in self.iteritems():
            if val is not None:
                ret[key] = val
        # Grouped options are nested one level deep under the group name.
        for grp_name in self._groups:
            ret[grp_name] = grp_dict = {}
            grp = self._get_group(grp_name)
            for opt in grp._opts:  # pylint: disable=W0212
                grp_dict[opt] = self._get(opt, grp_name)
        return ret
def setup_logging(conf):
    """
    Sets up the logging options for a log with supplied name

    :param conf: a cfg.ConfOpts object
    """
    if conf.log_config:
        # Use a logging configuration file for all settings...
        # Look for etc/<log_config> next to the executable, then in CWD.
        for location in (sys.argv[0], "."):
            pth = os.path.join(location, "etc", conf.log_config)
            if os.path.exists(pth):
                logging.config.fileConfig(pth)
                return
        raise RuntimeError("Unable to locate specified logging "
                           "config file: %s" % conf.log_config)

    # No config file: configure the root logger directly.
    root_logger = logging.root
    if conf.debug:
        root_logger.setLevel(logging.DEBUG)
    elif conf.verbose:
        root_logger.setLevel(logging.INFO)
    else:
        root_logger.setLevel(logging.WARNING)

    formatter = logging.Formatter(conf.log_format, conf.log_date_format)

    # Handler priority: syslog, then log file, then stdout.
    if conf.use_syslog:
        try:
            facility = getattr(logging.handlers.SysLogHandler,
                               conf.syslog_log_facility)
        except AttributeError:
            raise ValueError(_("Invalid syslog facility"))

        handler = logging.handlers.SysLogHandler(address="/dev/log",
                                                 facility=facility)
    elif conf.log_file:
        logfile = conf.log_file
        if conf.log_dir:
            logfile = os.path.join(conf.log_dir, logfile)
        handler = logging.handlers.WatchedFileHandler(logfile)
    else:
        handler = logging.StreamHandler(sys.stdout)

    handler.setFormatter(formatter)
    root_logger.addHandler(handler)
def register_str(*args, **kw):
    """Register a StrOpt with CONF, honouring an optional group= kwarg."""
    # NOTE: _ensure_group() must run first -- it pops "group" out of kw
    # before kw is forwarded to StrOpt().
    group = _ensure_group(kw)
    return CONF.register_opt(cfg.StrOpt(*args, **kw), group=group)
def register_cli_str(*args, **kw):
    """Register a StrOpt that is also exposed on the command line."""
    # _ensure_group() pops "group" from kw before kw reaches StrOpt().
    group = _ensure_group(kw)
    # Bug fix: this previously called register_opt(), so the option was never
    # registered as a CLI option (compare register_cli_bool below).
    return CONF.register_cli_opt(cfg.StrOpt(*args, **kw), group=group)
def register_bool(*args, **kw):
    """Register a BoolOpt with CONF, honouring an optional group= kwarg."""
    # _ensure_group() pops "group" from kw before kw reaches BoolOpt().
    group = _ensure_group(kw)
    return CONF.register_opt(cfg.BoolOpt(*args, **kw), group=group)
def register_cli_bool(*args, **kw):
    """Register a BoolOpt that is also exposed on the command line."""
    group = _ensure_group(kw)
    return CONF.register_cli_opt(cfg.BoolOpt(*args, **kw), group=group)
def register_list(*args, **kw):
    """Register a ListOpt with CONF, honouring an optional group= kwarg."""
    group = _ensure_group(kw)
    return CONF.register_opt(cfg.ListOpt(*args, **kw), group=group)
def register_multi_string(*args, **kw):
    """Register a MultiStrOpt with CONF, honouring an optional group= kwarg."""
    group = _ensure_group(kw)
    return CONF.register_opt(cfg.MultiStrOpt(*args, **kw), group=group)
def _ensure_group(kw):
    """Pop "group" out of *kw* (mutating it) and register the group.

    Returns the group name, or None if no group was requested. Must be
    called before *kw* is forwarded to an Opt constructor.
    """
    group = kw.pop("group", None)
    if group:
        CONF.register_group(cfg.OptGroup(name=group))
    return group
# Global configuration object and option registration for keystone.
CONF = Config(project="keystone")
# Top-level service options.
register_str("default_store")
register_str("service_header_mappings")
register_list("extensions")
register_str("service_host")
register_str("service_port")
register_bool("service_ssl")
register_str("admin_host")
register_str("admin_port")
register_bool("admin_ssl")
register_str("bind_host")
register_str("bind_port")
register_str("certfile")
register_str("keyfile")
register_str("ca_certs")
register_bool("cert_required")
register_str("keystone_admin_role")
register_str("keystone_service_admin_role")
register_bool("hash_password")
register_str("backends")
register_str("global_service_id")
register_bool("disable_tokens_in_url")
register_str("sql_connection", group="keystone.backends.sqlalchemy")
register_str("backend_entities", group="keystone.backends.sqlalchemy")
register_str("sql_idle_timeout", group="keystone.backends.sqlalchemy")
# May need to initialize other backends, too.
register_str("ldap_url", group="keystone.backends.ldap")
register_str("ldap_user", group="keystone.backends.ldap")
register_str("ldap_password", group="keystone.backends.ldap")
# Bug fix: group was misspelled "kkeystone.backends.ldap", which registered
# the option under a bogus group separate from the other ldap options.
register_list("backend_entities", group="keystone.backends.ldap")
|
/**
* Created by uzysjung on 2016. 9. 6..
*/
import axios from 'axios';
import { authError } from './user';
import { HOSTNAME , PORT } from '../../config'
// Base URL for all API requests, built from the app-level config.
const ROOT_URL = `http://${HOSTNAME}:${PORT}`;
// Redux-thunk action creator for an authenticated GET request.
// Dispatches reqAction() first, then recvAction(response.data) on success
// or errAction(error) on failure. The JWT is read from localStorage.
export function axiosGetRequest (url,query,reqAction,errAction,recvAction) {
    return (dispatch, state) => {
        dispatch(reqAction());
        const config = {
            headers: {
                Authorization : localStorage.getItem('token')
            },
            timeout: 5000,
            baseURL : ROOT_URL,
            url :url,
            method : 'get'
        };
        if(query) config.params = query;
        //console.log('config:',config);
        const request = axios(config);
        request.then(response => {
            console.log('response:',response)
            dispatch(recvAction(response.data));
        }).catch((error) => {
            console.log(error.response);
            if(error.response) {
                // 401 -> token invalid or expired.
                if(error.response.status === 401) {
                    dispatch(authError('토큰이 유효하지 않습니다.'));
                }
            }
            // NOTE(review): unlike axiosSendRequest, errAction is dispatched
            // even after a 401 authError -- confirm this is intentional.
            console.log('Error',error.message);
            dispatch(errAction(error));
        });
    }
};
// Redux-thunk action creator for an authenticated mutating request
// (POST/PUT/DELETE...). Dispatches reqAction() first, then
// recvAction(response.data) on success. A 401 response dispatches
// authError; errors without a response dispatch errAction.
export function axiosSendRequest(method, url, data, query = {}, reqAction, errAction, recvAction) {
    console.log('query:::',query);
    return (dispatch, state) => {
        dispatch(reqAction());
        const requestConfig = {
            headers: { Authorization: localStorage.getItem('token') },
            timeout: 3000,
            baseURL: ROOT_URL,
            url: url,
            data: data,
            method: method,
            params: query
        };
        axios(requestConfig)
            .then((response) => dispatch(recvAction(response.data)))
            .catch((error) => {
                if (error.response) {
                    // Expired/invalid token -> surface an auth error.
                    if (error.response.status === 401) {
                        dispatch(authError('토큰이 유효하지 않습니다.'));
                    }
                } else {
                    console.log('Error', error.message);
                    dispatch(errAction(error));
                }
            });
    };
}
|
import pytest
from mitmproxy.utils import strutils
def test_always_bytes():
    # bytes pass through untouched; str is encoded; other types raise.
    assert strutils.always_bytes(bytes(range(256))) == bytes(range(256))
    assert strutils.always_bytes("foo") == b"foo"
    with pytest.raises(ValueError):
        strutils.always_bytes(u"\u2605", "ascii")
    with pytest.raises(TypeError):
        strutils.always_bytes(42, "ascii")
def test_always_str():
    # str passes through; bytes are decoded; None is preserved; others raise.
    with pytest.raises(TypeError):
        strutils.always_str(42)
    assert strutils.always_str("foo") == "foo"
    assert strutils.always_str(b"foo") == "foo"
    assert strutils.always_str(None) is None
def test_escape_control_characters():
    # Control characters are replaced with "."; the second argument
    # (default True) appears to control whether \t\n\r are kept verbatim.
    # Only str input is accepted -- bytes raise ValueError.
    assert strutils.escape_control_characters(u"one") == u"one"
    assert strutils.escape_control_characters(u"\00ne") == u".ne"
    assert strutils.escape_control_characters(u"\nne") == u"\nne"
    assert strutils.escape_control_characters(u"\nne", False) == u".ne"
    assert strutils.escape_control_characters(u"\u2605") == u"\u2605"
    assert (
        strutils.escape_control_characters(bytes(bytearray(range(128))).decode()) ==
        u'.........\t\n..\r.................. !"#$%&\'()*+,-./0123456789:;<'
        u'=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~.'
    )
    assert (
        strutils.escape_control_characters(bytes(bytearray(range(128))).decode(), False) ==
        u'................................ !"#$%&\'()*+,-./0123456789:;<'
        u'=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~.'
    )
    with pytest.raises(ValueError):
        strutils.escape_control_characters(b"foo")
def test_bytes_to_escaped_str():
    # bytes -> printable str with \xNN escapes; the second positional
    # argument keeps \r\n\t literal; escape_single_quotes adds \' escaping.
    # Only bytes input is accepted -- str raises ValueError.
    assert strutils.bytes_to_escaped_str(b"foo") == "foo"
    assert strutils.bytes_to_escaped_str(b"\b") == r"\x08"
    assert strutils.bytes_to_escaped_str(br"&!?=\)") == r"&!?=\\)"
    assert strutils.bytes_to_escaped_str(b'\xc3\xbc') == r"\xc3\xbc"
    assert strutils.bytes_to_escaped_str(b"'") == r"'"
    assert strutils.bytes_to_escaped_str(b'"') == r'"'
    assert strutils.bytes_to_escaped_str(b"'", escape_single_quotes=True) == r"\'"
    assert strutils.bytes_to_escaped_str(b'"', escape_single_quotes=True) == r'"'
    assert strutils.bytes_to_escaped_str(b"\r\n\t") == "\\r\\n\\t"
    assert strutils.bytes_to_escaped_str(b"\r\n\t", True) == "\r\n\t"
    assert strutils.bytes_to_escaped_str(b"\n", True) == "\n"
    assert strutils.bytes_to_escaped_str(b"\\n", True) == "\\ \\ n".replace(" ", "")
    assert strutils.bytes_to_escaped_str(b"\\\n", True) == "\\ \\ \n".replace(" ", "")
    assert strutils.bytes_to_escaped_str(b"\\\\n", True) == "\\ \\ \\ \\ n".replace(" ", "")
    with pytest.raises(ValueError):
        strutils.bytes_to_escaped_str(u"such unicode")
def test_escaped_str_to_bytes():
    # Inverse of bytes_to_escaped_str: unescapes \xNN / \\ and utf-8
    # encodes the result. Only str input is accepted.
    assert strutils.escaped_str_to_bytes("foo") == b"foo"
    assert strutils.escaped_str_to_bytes("\x08") == b"\b"
    assert strutils.escaped_str_to_bytes("&!?=\\\\)") == br"&!?=\)"
    assert strutils.escaped_str_to_bytes(u"\\x08") == b"\b"
    assert strutils.escaped_str_to_bytes(u"&!?=\\\\)") == br"&!?=\)"
    assert strutils.escaped_str_to_bytes(u"\u00fc") == b'\xc3\xbc'
    with pytest.raises(ValueError):
        strutils.escaped_str_to_bytes(b"very byte")
def test_is_mostly_bin():
    # True only when a significant fraction of the bytes are non-printable.
    assert not strutils.is_mostly_bin(b"foo\xFF")
    assert strutils.is_mostly_bin(b"foo" + b"\xFF" * 10)
    assert not strutils.is_mostly_bin("")
def test_is_xml():
    # Detection is based on a leading "<" after optional whitespace.
    assert not strutils.is_xml(b"foo")
    assert strutils.is_xml(b"<foo")
    assert strutils.is_xml(b" \n<foo")
def test_clean_hanging_newline():
    # Strips at most one trailing newline; no-op otherwise.
    s = "foo\n"
    assert strutils.clean_hanging_newline(s) == "foo"
    assert strutils.clean_hanging_newline("foo") == "foo"
def test_hexdump():
    # Smoke test: hexdump yields at least one row for non-empty input.
    assert list(strutils.hexdump(b"one\0" * 10))
# Patterns matching single- and double-quoted single-line string literals
# (escape-aware), used as "special areas" by the tests below.
ESCAPE_QUOTES = [
    "'" + strutils.SINGLELINE_CONTENT + strutils.NO_ESCAPE + "'",
    '"' + strutils.SINGLELINE_CONTENT + strutils.NO_ESCAPE + '"'
]
def test_split_special_areas():
    # Splits text into alternating plain / special-area chunks whose
    # concatenation reproduces the input.
    assert strutils.split_special_areas("foo", ESCAPE_QUOTES) == ["foo"]
    assert strutils.split_special_areas("foo 'bar' baz", ESCAPE_QUOTES) == ["foo ", "'bar'", " baz"]
    assert strutils.split_special_areas(
        """foo 'b\\'a"r' baz""",
        ESCAPE_QUOTES
    ) == ["foo ", "'b\\'a\"r'", " baz"]
    assert strutils.split_special_areas(
        "foo\n/*bar\nbaz*/\nqux",
        [r'/\*[\s\S]+?\*/']
    ) == ["foo\n", "/*bar\nbaz*/", "\nqux"]
    assert strutils.split_special_areas(
        "foo\n//bar\nbaz",
        [r'//.+$']
    ) == ["foo\n", "//bar", "\nbaz"]
def test_escape_special_areas():
    # Characters inside special areas are swapped for private-use-plane
    # codepoints and restored by unescape_special_areas (round-trip).
    assert strutils.escape_special_areas('foo "bar" baz', ESCAPE_QUOTES, "*") == 'foo "bar" baz'
    esc = strutils.escape_special_areas('foo "b*r" b*z', ESCAPE_QUOTES, "*")
    assert esc == 'foo "b\ue02ar" b*z'
    assert strutils.unescape_special_areas(esc) == 'foo "b*r" b*z'
|
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.report.framework.expression;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import org.joda.beans.Bean;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.opengamma.strata.calc.runner.CalculationFunctions;
import com.opengamma.strata.market.amount.LegAmount;
import com.opengamma.strata.market.amount.LegAmounts;
/**
* Evaluates a token against a bean to produce another object.
* <p>
* The token must be the name of one of the properties of the bean and the result is the value of the property.
* <p>
* There is special handling of beans with a single property. The name of the property can be omitted from
* the expression if the bean only has one property.
* <p>
* For example, the bean {@link LegAmounts} has a single property named {@code amounts} containing a list of
* {@link LegAmount} instances. The following expressions are equivalent and both return the first amount in the
* list. {@code LegInitialNotional} is a measure that produces {@code LegAmounts}.
* <pre>
* Measures.LegInitialNotional.0
* Measures.LegInitialNotional.amounts.0
* </pre>
* <p>
* If the token matches the property then the default behaviour applies; the property value is returned and
* the remaining tokens do not include the property token. If the token doesn't match the property, the property value
* is returned but the token isn't consumed. i.e. the remaining tokens returned from {@link #evaluate} include
* the first token.
*/
public class BeanTokenEvaluator extends TokenEvaluator<Bean> {
  @Override
  public Class<Bean> getTargetType() {
    return Bean.class;
  }
  /**
   * Returns the valid tokens for the bean: its property names, plus -- for
   * single-property beans -- the tokens of that property's value, so the
   * property name may be omitted from the expression.
   */
  @Override
  public Set<String> tokens(Bean bean) {
    if (bean.propertyNames().size() == 1) {
      String singlePropertyName = Iterables.getOnlyElement(bean.propertyNames());
      Object propertyValue = bean.property(singlePropertyName).get();
      Set<String> valueTokens = ValuePathEvaluator.tokens(propertyValue);
      return ImmutableSet.<String>builder()
          .add(singlePropertyName)
          .addAll(valueTokens)
          .build();
    } else {
      return bean.propertyNames();
    }
  }
  /**
   * Evaluates the first token against the bean's properties
   * (case-insensitively). On a match the property value is returned and the
   * token is consumed; for a single-property bean with no match the property
   * value is returned WITHOUT consuming the token (see class doc).
   */
  @Override
  public EvaluationResult evaluate(
      Bean bean,
      CalculationFunctions functions,
      String firstToken,
      List<String> remainingTokens) {
    Optional<String> propertyName = bean.propertyNames().stream()
        .filter(p -> p.equalsIgnoreCase(firstToken))
        .findFirst();
    if (propertyName.isPresent()) {
      Object propertyValue = bean.property(propertyName.get()).get();
      return propertyValue != null ?
          EvaluationResult.success(propertyValue, remainingTokens) :
          EvaluationResult.failure("No value available for property '{}'", firstToken);
    }
    // The bean has a single property which doesn't match the token.
    // Return the property value without consuming any tokens.
    // This allows skipping over properties when the bean only has a single property.
    if (bean.propertyNames().size() == 1) {
      String singlePropertyName = Iterables.getOnlyElement(bean.propertyNames());
      Object propertyValue = bean.property(singlePropertyName).get();
      List<String> tokens = ImmutableList.<String>builder().add(firstToken).addAll(remainingTokens).build();
      return propertyValue != null ?
          EvaluationResult.success(propertyValue, tokens) :
          EvaluationResult.failure("No value available for property '{}'", firstToken);
    }
    return invalidTokenFailure(bean, firstToken);
  }
}
|
define([
    'dojo/_base/declare',
    'dstore/Rest',
    'dstore/Trackable',
    'dgrid/OnDemandGrid',
    'dgrid/Editor'
], function (declare, Rest, Trackable, OnDemandGrid, Editor) {
    // REST-backed store serving the request headers.
    var store = new Rest({
        target: '/GetAllRequestHeaders/'
    });
    // Instantiate grid
    // Header_Name is editable inline (click to edit, auto-saved back to the
    // store); Header_Value is read-only.
    var RequestHeadersGrid = new (declare([OnDemandGrid, Editor]))({
        collection: store,
        selectionMode: "single",
        columns: {
            Header_Name: {
                label: 'Header Name',
                editor: 'text',
                editOn: 'click',
                autoSave: true
            },
            Header_Value: {
                label: 'Header Value'
            }
        }
    }, 'MainRequestHeadesrsGrid'); // NOTE(review): id misspells "Headers" -- confirm nothing targets it before renaming
    return RequestHeadersGrid;
});
|
/*
* AuthenticationReceiverTest.java
*
* Copyright (c) 2015 Auth0 (http://auth0.com)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.auth0.lock.receiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.support.annotation.NonNull;
import android.support.v4.content.LocalBroadcastManager;
import com.auth0.core.Token;
import com.auth0.core.UserProfile;
import com.auth0.lock.BuildConfig;
import com.auth0.lock.Lock;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.robolectric.RobolectricGradleTestRunner;
import org.robolectric.annotation.Config;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.*;
@RunWith(RobolectricGradleTestRunner.class)
@Config(constants = BuildConfig.class, sdk = 21, manifest = Config.NONE)
public class AuthenticationReceiverTest {
    // Spy over an anonymous subclass so the dispatch in onReceive() can be
    // verified against the callback hooks.
    private AuthenticationReceiver receiver;
    @Mock private Intent intent;
    @Mock private Context context;
    @Mock private UserProfile profile;
    @Mock private Token token;
    @Mock private LocalBroadcastManager manager;
    @Captor private ArgumentCaptor<IntentFilter> captor;
    @Before
    public void setUp() throws Exception {
        MockitoAnnotations.initMocks(this);
        receiver = spy(new AuthenticationReceiver() {
            @Override
            public void onAuthentication(@NonNull UserProfile profile, @NonNull Token token) {
            }
        });
    }
    // AUTHENTICATION_ACTION with both extras present -> onAuthentication().
    @Test
    public void shouldHandleAuthenticationAction() throws Exception {
        when(intent.getAction()).thenReturn(Lock.AUTHENTICATION_ACTION);
        when(intent.getParcelableExtra(eq(Lock.AUTHENTICATION_ACTION_TOKEN_PARAMETER))).thenReturn(token);
        when(intent.getParcelableExtra(eq(Lock.AUTHENTICATION_ACTION_PROFILE_PARAMETER))).thenReturn(profile);
        receiver.onReceive(context, intent);
        verify(receiver).onAuthentication(eq(profile), eq(token));
        verifyNoMoreInteractions(receiver);
    }
    // AUTHENTICATION_ACTION with null extras -> treated as a sign-up.
    @Test
    public void shouldHandleAuthenticationActionForSignUp() throws Exception {
        when(intent.getAction()).thenReturn(Lock.AUTHENTICATION_ACTION);
        when(intent.getParcelableExtra(eq(Lock.AUTHENTICATION_ACTION_TOKEN_PARAMETER))).thenReturn(null);
        when(intent.getParcelableExtra(eq(Lock.AUTHENTICATION_ACTION_PROFILE_PARAMETER))).thenReturn(null);
        receiver.onReceive(context, intent);
        verify(receiver).onSignUp();
        verifyNoMoreInteractions(receiver);
    }
    @Test
    public void shouldHandleCancelAction() throws Exception {
        when(intent.getAction()).thenReturn(Lock.CANCEL_ACTION);
        receiver.onReceive(context, intent);
        verify(receiver).onCancel();
        verifyNoMoreInteractions(receiver);
    }
    @Test
    public void shouldHandleChangePasswordAction() throws Exception {
        when(intent.getAction()).thenReturn(Lock.CHANGE_PASSWORD_ACTION);
        receiver.onReceive(context, intent);
        verify(receiver).onChangePassword();
        verifyZeroInteractions(receiver);
    }
    // Unknown actions must not trigger any callback.
    @Test
    public void shouldIgnoreInvalidAction() throws Exception {
        when(intent.getAction()).thenReturn("A STRANGE ACTION RECEIVED");
        receiver.onReceive(context, intent);
        verifyZeroInteractions(receiver);
    }
    // registerIn() must subscribe to all three Lock actions.
    @Test
    public void shouldRegisterInBroadcastManager() throws Exception {
        receiver.registerIn(manager);
        verify(manager).registerReceiver(eq(receiver), captor.capture());
        final IntentFilter intentFilter = captor.getValue();
        assertThat(intentFilter.hasAction(Lock.AUTHENTICATION_ACTION), is(true));
        assertThat(intentFilter.hasAction(Lock.CANCEL_ACTION), is(true));
        assertThat(intentFilter.hasAction(Lock.CHANGE_PASSWORD_ACTION), is(true));
    }
    @Test
    public void shouldUnregisterBroadcastManager() throws Exception {
        receiver.unregisterFrom(manager);
        verify(manager).unregisterReceiver(eq(receiver));
    }
}
|
#
# db.py -- Database utilities.
#
# Copyright (c) 2007 David Trowbridge
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from django.db import models, IntegrityError
class ConcurrencyManager(models.Manager):
    """
    A class designed to work around database concurrency issues.
    """
    def get_or_create(self, **kwargs):
        """
        A wrapper around get_or_create that makes a final attempt to get
        the object if the creation fails.
        This helps with race conditions in the database where, between the
        original get() and the create(), another process created the object,
        causing us to fail. We'll then execute a get().
        This is still prone to race conditions, but they're even more rare.
        A delete() would have to happen after the unexpected create() but
        before the get().
        """
        try:
            return super(ConcurrencyManager, self).get_or_create(**kwargs)
        except IntegrityError:
            # 'defaults' is a get_or_create-only kwarg; it must not be
            # forwarded to the plain get() below.
            kwargs.pop('defaults', None)
            return self.get(**kwargs)
|
/**
 * This is the main method that converts the document to a collection of pages. Since this method can be slow (depending
 * on the number of DOM elements in the document), it runs async and returns a promise.
 * @param currentScroll current vertical scroll offset, used to restore position
 * @returns {Promise} resolved once all pages are rendered into the DOM
 */
function composePage(currentScroll) {
    var deferred = new $.Deferred();
    ROOT = $(OPTIONS.rootElement);
    var fragment = createDocumentFragment();
    CONTAINER = $(fragment.querySelector('#hrz-container'));
    CONTAINER.css({
        'display': 'none', // setting display:none considerably speeds up rendering
        'top': 0,
        'left': 0
    });
    VIEWPORT_HEIGHT = $(window).height() - OPTIONS.pageMargin * 2;
    displayLoadingIndicator().then(function() {
        // a setTimeout is used to force async execution and allow the loadingIndicator to display before the
        // heavy computations of composePage() are begun.
        setTimeout(function() {
            if (!OPTIONS.displayScrollbar) {
                $('body').css('overflow-y', 'hidden');
            }
            var allNodes = new NodeCollection(OPTIONS.selector);
            PAGE_COLLECTION = pageCollectionGenerator.fromNodeCollection(allNodes);
            PAGE_COLLECTION.appendToDom(currentScroll);
            // remove any DOM nodes that are not included in the selector,
            // since they will just be left floating around in the wrong place.
            CONTAINER.children().not('.hrz-page').filter(':visible').remove();
            ROOT.empty().append(fragment);
            // add the theme's custom CSS to the document now so that it can be
            // used in calculating the elements' styles.
            addCustomCssToDocument();
            PAGE_COLLECTION.forEach(function(page) {
                page.nodes.forEach(function(node) {
                    node.renderStyles(page);
                });
            });
            CONTAINER.css('display', '');
            // Size the document so native scrolling spans all pages.
            var documentHeight = PAGE_COLLECTION.last().bottom / OPTIONS.scrollbarShortenRatio + VIEWPORT_HEIGHT;
            ROOT.height(documentHeight);
            renderPageCount();
            removeLoadingIndicator();
            deferred.resolve();
        }, 0);
    });
    return deferred.promise();
}
/**
 * Insert (or refresh) the theme CSS loaded by loadCustomCss() in the
 * document head, so it can take part in style calculations.
 */
function addCustomCssToDocument() {
    var existingStyle = $('#hrz-custom-css');
    if (existingStyle.length > 0) {
        // Style element already present: just swap its contents.
        existingStyle.text(CUSTOM_CSS);
    } else {
        $('head').append('<style id="hrz-custom-css" type="text/css">' + CUSTOM_CSS + '</style>');
    }
}
/**
 * Build a detached documentFragment containing the empty #hrz-container div.
 * Assembling everything off-DOM and appending once improves performance.
 * @returns {DocumentFragment}
 */
function createDocumentFragment() {
    var containerDiv = document.createElement('div');
    containerDiv.id = 'hrz-container';
    var fragment = document.createDocumentFragment();
    fragment.appendChild(containerDiv);
    return fragment;
}
/**
 * Show the "Loading..." overlay and resolve once it is visible.
 * @returns {Promise}
 */
function displayLoadingIndicator() {
    var deferred = new $.Deferred();
    if ($('.hrz-loading-indicator').length === 0) {
        $('body').append('<div class="hrz-loading-indicator" style="display:none;"><p class="hrz-loading-indicator">Loading...</p></div>');
        $('div.hrz-loading-indicator').fadeIn(50, function() {
            deferred.resolve();
        });
    } else {
        // Bug fix: the promise previously never resolved when the indicator
        // already existed, so composePage()'s .then() callback never ran.
        deferred.resolve();
    }
    return deferred.promise();
}
/**
 * Fade out and remove the "Loading..." overlay after a short delay
 * (the delay prevents a flicker on fast renders).
 */
function removeLoadingIndicator() {
    var FADE_DELAY_MS = 300;
    setTimeout(function() {
        $('div.hrz-loading-indicator').fadeOut(50, function() {
            $(this).remove();
        });
    }, FADE_DELAY_MS);
}
/**
 * Create the "current / total" page indicator once and populate the total.
 * Hidden via the 'hidden' class when OPTIONS.displayPageCount is false.
 */
function renderPageCount() {
    if ($('.hrz-page-count').length === 0) {
        var pageCountDiv = $('<div class="hrz-page-count"></div>');
        $('body').append(pageCountDiv);
        pageCountDiv.append('<span id="hrz-current-page"></span> / <span id="hrz-total-pages"></span>');
        $('#hrz-total-pages').html(PAGE_COLLECTION.length);
        if (!OPTIONS.displayPageCount) {
            pageCountDiv.addClass('hidden');
        }
    }
}
/** Remove the page-count indicator from the DOM. */
function removePageCount() {
    $('.hrz-page-count').remove();
}
function updatePageCount() {
$('#hrz-current-page').html(PAGE_COLLECTION.currentPage);
}
/**
 * Fisher-Yates shuffle, performed in place.
 * + Jonas Raoni Soares Silva
 * @ http://jsfromhell.com/array/shuffle [v1.0]
 * @param o array to shuffle
 * @returns {*} the same array, shuffled
 */
function shuffle(o) {
    var i = o.length;
    while (i) {
        // Pick a random index in [0, i), then swap it with the last
        // unshuffled slot.
        var j = Math.floor(Math.random() * i);
        i -= 1;
        var tmp = o[i];
        o[i] = o[j];
        o[j] = tmp;
    }
    return o;
}
/** Shared do-nothing callback. */
function noop() {}
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Compare Born and Rytov approximation
This script creates a colorfull plot.
"""
from __future__ import division
from __future__ import print_function
from matplotlib import pylab as plt
import numpy as np
import os
import sys
DIR = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, DIR+"/../")
import bornscat
rfac = 1  # resolution multiplier; increase for finer sampling
# Set measurement parameters
# Compute scattered field from cylinder
radius = 3 # wavelengths
nmed = 1.333  # refractive index of the surrounding medium
nsph = 1.343  # refractive index of the sphere
size = 64*rfac # pixels
res = 4*rfac  # px/wavelength (23 was used previously)
fft_method = "numpy"
# create refractive index map for Born
n = nmed * np.ones((size,size,size))
n0 = 1*n  # homogeneous background map (copy of the medium-only volume)
rad = radius*res  # sphere radius in pixels
x=np.linspace(-size/2,size/2,size, endpoint=False)
# Broadcastable coordinate axes spanning the 3D grid.
xv = x.reshape(-1, 1, 1)
yv = x.reshape( 1,-1, 1)
zv = x.reshape( 1, 1,-1)
# Place the sphere at the grid center.
n[np.where((xv**2+yv**2+zv**2 < rad**2))] = nsph
# Rytov
print("Rytov scattered wave")
rytov_u0 = bornscat.rytov_3d(n0, nmed, res, fft_method=fft_method)
rytov_u = bornscat.rytov_3d(n, nmed, res, fft_method=fft_method)
# Normalize by the incident field; plot phase and amplitude of the ratio.
ro = rytov_u/rytov_u0
rph = np.angle(ro)
ram = np.abs(ro)
phakwargs = {"vmin": rph.min(),
             "vmax": rph.max(),
             "cmap": "coolwarm"}
ampkwargs = {"vmin": ram.min(),
             "vmax": ram.max(),
             "cmap": "gray"}
# Plot central slices along each axis.
fig, axes = plt.subplots(2,3)
axes = axes.transpose().flatten()
axes[0].set_title("Rytov phase z=0")
axes[0].imshow(rph[:,:,size//2], **phakwargs)
axes[1].set_title("Rytov amplitude z=0")
axes[1].imshow(ram[:,:,size//2], **ampkwargs)
axes[2].set_title("Rytov phase y=0")
axes[2].imshow(rph[:,size//2,:], **phakwargs)
axes[3].set_title("Rytov amplitude y=0")
axes[3].imshow(ram[:,size//2,:], **ampkwargs)
axes[4].set_title("Rytov phase x=0")
axes[4].imshow(rph[size//2,:,:], **phakwargs)
axes[5].set_title("Rytov amplitude x=0")
axes[5].imshow(ram[size//2,:,:], **ampkwargs)
plt.tight_layout()
plt.savefig(os.path.join(DIR, "born_rytov_plot_3d.png"))
|
/**
* @brief
* Printing of comments test.
* @file
* output_test.cpp
* @author
* Henrik Samuelsson
*/
#include <iostream>
// Demonstrates how the "*/" sequence inside string literals interacts with
// block comments; each accepted statement prints "*/".
int main ()
{
    std::cout << "*/"; // Ok will print */
    std::cout << "*/"; // Ok will print */
    //std::cout << /* "*/" */; // Will not compile the first citation mark
    // ends up inside a comment.
    std::cout << /* "*/" /* "/*" */; // Ok will print */
    return 0;
}
|
# coding=utf-8
# Foris - web administration interface for OpenWrt based on NETCONF
# Copyright (C) 2017 CZ.NIC, z.s.p.o. <http://www.nic.cz>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import collections
import gettext
import os
from jinja2.ext import InternationalizationExtension
from foris import BASE_DIR
from foris.langs import translations
from foris.state import current_state
# Directory holding the compiled gettext catalogues (<lang>/LC_MESSAGES/...).
locale_directory = os.path.join(BASE_DIR, "locale")
class _LangDict(collections.OrderedDict):
    """Ordered language-code -> translation mapping with English fallback.

    Looking up a language code that has no entry returns the "en" entry
    instead of raising KeyError.
    """

    def __missing__(self, key):
        # Unknown language: serve the English catalogue instead.
        return super().__getitem__("en")
# Rebind `translations` from the list of language codes (foris.langs) to a
# mapping of code -> gettext catalogue; unknown codes fall back to English.
translations = _LangDict(
    (e, gettext.translation("messages", locale_directory, languages=[e], fallback=True))
    for e in translations
)
class SimpleDelayedTranslator(object):
    """Wraps a message so it is translated lazily, at str() time.

    NOTE: the `gettext` called in __str__ resolves at call time to the
    module-level gettext() defined below (which shadows the gettext module),
    so the current interface language is used when the text is rendered.
    """
    def __init__(self, text):
        self.text = text
    def __str__(self):
        return gettext(self.text)
    def __add__(self, other):
        return str(self) + other
# PEP 8: use def instead of assigning lambdas to names (behaviour unchanged;
# these intentionally shadow the imported `gettext` module).
def gettext(x):
    """Translate *x* using the catalogue for the current interface language."""
    return translations[current_state.language].gettext(x)


def ngettext(singular, plural, n):
    """Plural-aware translation for the current interface language."""
    return translations[current_state.language].ngettext(singular, plural, n)


def gettext_dummy(x):
    """Defer translation: returns an object translated at str() time."""
    return SimpleDelayedTranslator(x)


_ = gettext
def set_current_language(language):
    """Save interface language to foris.settings.lang.

    :param language: language code to save
    :return: True on success, False otherwise
    """
    if current_state.backend.perform("web", "set_language", {"language": language})["result"]:
        # Backend accepted the change; mirror it in the in-memory state.
        current_state.update_lang(language)
        return True
    return False
# for jinja template
class ForisInternationalizationExtension(InternationalizationExtension):
    """Jinja2 i18n extension wired to the language-aware callables above."""
    def __init__(self, environment):
        super(ForisInternationalizationExtension, self).__init__(environment)
        # Make {% trans %} / gettext() in templates use the current language.
        self._install_callables(gettext, ngettext)
i18n = ForisInternationalizationExtension
|
<?php if (!defined('BASEPATH')) exit('No direct script access allowed');
/**
*
*/
/**
 * Thin ActiveRecord-style model for the `building` table;
 * all CRUD behaviour is inherited from MY_Model.
 */
class Building_model extends MY_Model{
    public $_table='building';
    public $primary_key='buildingId';
}
/* End of file building_model.php */
/* Location: ./application/models/building_model.php*/
|
(function() {
  'use strict';
  angular.module('civic.services')
    .factory('OrganizationsResource', OrganizationsResource)
    .factory('Organizations', OrganizationsService);

  // @ngInject
  // $resource endpoints for the organizations API. List/detail responses
  // are cached; evidence items are re-fetched each time.
  function OrganizationsResource($resource) {
    return $resource('/api/organizations', {
    }, {
      query: {
        method: 'GET',
        isArray: false,
        cache: true
      },
      get: {
        url:'/api/organizations/:organizationId',
        params: {organizationId: '@organizationId'},
        method: 'GET',
        isArray: false,
        cache: true
      },
      queryEvidence: {
        method: 'GET',
        url: '/api/organizations/:organizationId/evidence_items',
        isArray: false,
        cache: false
      }
    });
  }
  // @ngInject
  // Service facade: exposes shared `data` containers that are kept in sync
  // (via angular.copy) with the latest query/get/queryEvidence responses.
  function OrganizationsService(OrganizationsResource, UserOrgsInterceptor) {
    var item = {};
    var collection = [];
    var evidence_items = [];
    return {
      data: {
        item: item,
        collection: collection,
        evidence_items: evidence_items
      },
      query: query,
      get: get,
      queryEvidence: queryEvidence
    };
    // Run each member through UserOrgsInterceptor (it expects a response-like
    // object, hence the {data: user} mock wrapper).
    function interceptMembers(organization) {
      if(organization.members) {
        organization.members = organization.members.map(function(user) {
          var mock = {};
          mock.data = user;
          return UserOrgsInterceptor(mock);
        });
      }
      return organization;
    }
    function query(reqObj) {
      return OrganizationsResource.query(reqObj).$promise
        .then(function(response) {
          response.records = response.result.map(function(org) {
            return interceptMembers(org);
          });
          angular.copy(response.records, collection);
          return response.$promise;
        });
    }
    function get(organizationId) {
      return OrganizationsResource.get({
        organizationId: organizationId
      }).$promise
        .then(function(response) {
          response = interceptMembers(response);
          angular.copy(response, item);
          return response.$promise;
        });
    }
    function queryEvidence(organizationId) {
      return OrganizationsResource.queryEvidence({
        organizationId: organizationId,
        count: 999 // effectively "all" evidence items in one page
      }).$promise
        .then(function(response) {
          angular.copy(response.records, evidence_items);
          return response.$promise;
        });
    }
  }
})();
|
using System;
namespace Features.CrossPlatform.Views
{
    /// <summary>
    /// Code-behind for the CoroutineView page; the UI itself is defined in
    /// the corresponding XAML partial.
    /// </summary>
    public partial class CoroutineView
    {
        public CoroutineView ()
        {
            InitializeComponent ();
        }
    }
}
|
<div id="doc-template">
<h1>
<em>13.</em>
<?php echo _("Differences"); ?>
</h1>
<div id="challenge-descroption"> <!-- NOTE: id misspells "description"; kept as-is in case CSS/JS targets it -->
<p>
<?php echo _("Uh oh, looks like there has been some additions and
changes to the octocat family. Let's take a look at what is
different from our last commit by using the git diff command."); ?>
</p>
<p>
<?php echo _("In this case we want the diff of our most recent
commit, which we can refer to using the HEAD pointer."); ?>
</p>
<p>
<code dir="ltr" class="input-command">
git diff HEAD
</code>
</p>
</div>
</div>
|
/*
This file is part of:
NoahFrame
https://github.com/ketoo/NoahGameFrame
Copyright 2009 - 2021 NoahFrame(NoahGameFrame)
File creator: lvsheng.huang
NoahFrame is open-source software and you can redistribute it and/or modify
it under the terms of the License; besides, anyone who use this file/software must include this copyright announcement.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#ifndef NFI_RESPONSE_H
#define NFI_RESPONSE_H
#include <map>
#include <iostream>
#include <string>
#include <sstream>
#include "Dependencies/ajson/ajson.hpp"
// Generic JSON-serialisable response envelope (status code + message).
class NFIResponse
{
public:
    // Outcome of the handled request.
    enum ResponseType
    {
        RES_TYPE_SUCCESS,
        RES_TYPE_FAILED,
        RES_TYPE_AUTH_FAILED,   // authentication/authorisation failure
    };
public:
    ResponseType code;      // result status
    std::string message;    // human-readable detail
};
// ajson macro: generates JSON (de)serialisation for the listed fields.
AJSON(NFIResponse, code, message)
#endif
|
import discord
from discord.ext import commands
from .utils.dataIO import fileIO
from .utils import checks
from __main__ import user_allowed
import os
class CustomCommands:
"""Custom commands."""
    def __init__(self, bot):
        self.bot = bot
        # Mapping of server.id -> {command name: response text}, persisted
        # to data/customcom/commands.json via fileIO.
        self.c_commands = fileIO("data/customcom/commands.json", "load")
@commands.command(pass_context=True, no_pm=True)
@checks.mod_or_permissions(manage_server=True)
async def addcom(self, ctx, command : str, *text):
"""Adds a custom command
Example:
!addcom yourcommand Text you want
"""
if text == ():
await self.bot.say("addcom [command] [text/url]")
return
server = ctx.message.server
channel = ctx.message.channel
text = " ".join(text)
if not server.id in self.c_commands:
self.c_commands[server.id] = {}
cmdlist = self.c_commands[server.id]
if command not in cmdlist:
cmdlist[command] = text
self.c_commands[server.id] = cmdlist
fileIO("data/customcom/commands.json", "save", self.c_commands)
await self.bot.say("`Custom command successfully added.`")
else:
await self.bot.say("`This command already exists. Use editcom to edit it.`")
@commands.command(pass_context=True, no_pm=True)
@checks.mod_or_permissions(manage_server=True)
async def editcom(self, ctx, command : str, *text):
"""Edits a custom command
Example:
!editcom yourcommand Text you want
"""
if text == ():
await self.bot.say("editcom [command] [text/url]")
return
server = ctx.message.server
channel = ctx.message.channel
text = " ".join(text)
if server.id in self.c_commands:
cmdlist = self.c_commands[server.id]
if command in cmdlist:
cmdlist[command] = text
self.c_commands[server.id] = cmdlist
fileIO("data/customcom/commands.json", "save", self.c_commands)
await self.bot.say("`Custom command successfully edited.`")
else:
await self.bot.say("`That command doesn't exist. Use addcom [command] [text]`")
else:
await self.bot.say("`There are no custom commands in this server. Use addcom [command] [text]`")
@commands.command(pass_context=True, no_pm=True)
@checks.mod_or_permissions(manage_server=True)
async def delcom(self, ctx, command : str):
"""Deletes a custom command
Example:
!delcom yourcommand"""
server = ctx.message.server
channel = ctx.message.channel
if server.id in self.c_commands:
cmdlist = self.c_commands[server.id]
if command in cmdlist:
cmdlist.pop(command, None)
self.c_commands[server.id] = cmdlist
fileIO("data/customcom/commands.json", "save", self.c_commands)
await self.bot.send_message(channel, "`Custom command successfully deleted.`")
else:
await self.bot.say("`That command doesn't exist.`")
else:
await self.bot.send_message(channel, "`There are no custom commands in this server. Use addcom [command] [text]`")
async def checkCC(self, message):
if message.author.id == self.bot.user.id or len(message.content) < 2 or message.channel.is_private:
return
if not user_allowed(message):
return
msg = message.content
server = message.server
prefix = self.get_prefix(msg)
if prefix and server.id in self.c_commands.keys():
cmdlist = self.c_commands[server.id]
cmd = msg[len(prefix):]
if cmd in cmdlist.keys():
await self.bot.send_message(message.channel, cmdlist[cmd])
def get_prefix(self, msg):
for p in self.bot.command_prefix:
if msg.startswith(p):
return p
return False
def check_folders():
    """Create the data/customcom directory on first run."""
    folder = "data/customcom"
    if not os.path.exists(folder):
        print("Creating data/customcom folder...")
        os.makedirs(folder)
def check_files():
    """Ensure commands.json exists, creating it as an empty dict if needed."""
    path = "data/customcom/commands.json"
    if not fileIO(path, "check"):
        print("Creating empty commands.json...")
        fileIO(path, "save", {})
def setup(bot):
    """Entry point used by the bot loader: wire up the cog and its listener."""
    check_folders()
    check_files()
    cog = CustomCommands(bot)
    bot.add_listener(cog.checkCC, "on_message")
    bot.add_cog(cog)
|
#!/usr/bin/env python
#
# Wrapper script for starting the biopet-extractadaptersfastqc JAR package
#
# This script is written for use with the Conda package manager and is copied
# from the peptide-shaker wrapper. Only the parameters are changed.
# (https://github.com/bioconda/bioconda-recipes/blob/master/recipes/peptide-shaker/peptide-shaker.py)
#
# This file was automatically generated by the sbt-bioconda plugin.
import os
import subprocess
import sys
import shutil
from os import access
from os import getenv
from os import X_OK
jar_file = 'ExtractAdaptersFastqc-assembly-0.2.jar'
default_jvm_mem_opts = []
# !!! End of parameter section. No user-serviceable code below this line !!!
def real_dirname(path):
    """Return the symlink-resolved, canonicalized directory-portion of path."""
    resolved = os.path.realpath(path)
    return os.path.dirname(resolved)
def java_executable():
    """Return the executable name of the Java interpreter.

    Prefers $JAVA_HOME/bin/java when it exists and is executable,
    otherwise falls back to plain 'java' on the PATH.
    """
    java_home = getenv('JAVA_HOME')
    java_bin = os.path.join('bin', 'java')
    candidate = os.path.join(java_home, java_bin) if java_home else None
    if candidate and access(candidate, X_OK):
        return candidate
    return 'java'
def jvm_opts(argv):
    """Construct list of Java arguments based on our argument list.

    The argument list passed in argv must not include the script name.
    The return value is a 4-tuple of the form:
        (memory_options, prop_options, passthrough_options, exec_dir)
    where exec_dir is None unless a --exec_dir=DIR argument was present.
    (The docstring previously claimed a 3-tuple; the function has always
    returned four elements.)
    """
    mem_opts = []
    prop_opts = []
    pass_args = []
    exec_dir = None

    for arg in argv:
        if arg.startswith('-D'):
            prop_opts.append(arg)
        elif arg.startswith('-XX'):
            prop_opts.append(arg)
        elif arg.startswith('-Xm'):
            mem_opts.append(arg)
        elif arg.startswith('--exec_dir='):
            exec_dir = arg.split('=')[1].strip('"').strip("'")
            if not os.path.exists(exec_dir):
                # First use of this exec_dir: copy the distribution into it.
                shutil.copytree(real_dirname(sys.argv[0]), exec_dir, symlinks=False, ignore=None)
        else:
            pass_args.append(arg)

    # In the original shell script the test coded below read:
    # if [ "$jvm_mem_opts" == "" ] && [ -z ${_JAVA_OPTIONS+x} ]
    # To reproduce the behaviour of the above shell code fragment
    # it is important to explicitly check for equality with None
    # in the second condition, so a null envar value counts as True!
    if mem_opts == [] and getenv('_JAVA_OPTIONS') is None:
        mem_opts = default_jvm_mem_opts

    return (mem_opts, prop_opts, pass_args, exec_dir)
def main():
    """
    PeptideShaker updates files relative to the path of the jar file.
    In a multiuser setting, the option --exec_dir="exec_dir" can be used
    as the location for the peptide-shaker distribution. If the exec_dir
    does not exist, the jar file, lib, and resources are copied there.
    """
    java = java_executable()
    mem_opts, prop_opts, pass_args, exec_dir = jvm_opts(sys.argv[1:])
    jar_dir = exec_dir if exec_dir else real_dirname(sys.argv[0])

    # When the first passthrough argument looks like a fully qualified
    # main class ("eu..."), launch with -cp instead of -jar.
    jar_arg = '-cp' if pass_args and pass_args[0].startswith('eu') else '-jar'
    jar_path = os.path.join(jar_dir, jar_file)

    java_args = [java] + mem_opts + prop_opts + [jar_arg, jar_path] + pass_args
    sys.exit(subprocess.call(java_args))
if __name__ == '__main__':
main()
|
#
# $Id: build_ansi.py 9736 2011-06-20 16:49:22Z ahartvigsen $
#
# Proprietary and confidential.
# Copyright $Date:: 2011#$ Perfect Search Corporation.
# All rights reserved.
#
import sys
import getch
def prompt(msg, choices=None, default='', normfunc=None, readline=None):
    '''
    Ask user a question and read their response.
    @param choices A string that enumerates possible answers.
    @param default A string that will be returned if the user presses Enter. If
        default is None (as opposed to the empty string), the empty string
        will not be considered a valid answer, and the user will be
        re-prompted until they satisfy the function.
    @param normfunc A function that will be used to normalize the answer.
        Takes a string and returns an answer in any data type.
    @param readline A function that will be used to read the user's answer.
        Normally this is sys.stdin.readline, but it can also be
        readline_masked() if prompting for a password.
    '''
    show_default = default is not None and default != ''
    suffix = ''
    if choices:
        if show_default:
            suffix = ' (%s; =%s)' % (choices, str(default))
        else:
            suffix = ' (%s)' % choices
    elif show_default:
        suffix = ' ( =%s)' % str(default)
    text = msg + suffix + ' '
    # Bind the reader lazily so callers can redirect stdin after import time;
    # binding sys.stdin.readline in the prototype would freeze it forever.
    if readline is None:
        readline = sys.stdin.readline
    while True:
        sys.stdout.write(text)
        answer = readline().rstrip()
        if normfunc:
            answer = normfunc(answer)
        if answer:
            return answer
        if default is not None:
            return default
def prompt_bool(msg, default=None):
    '''
    Ask user for a yes/no answer.
    @param default If None, don't default and keep asking until either 'y'
        or 'n' is received. Otherwise, use this value when user presses
        Enter.
    '''
    if default is None:
        hint = ' (y/n) '
    elif default:
        hint = ' (Y/n) '
    else:
        hint = ' (y/N) '
    while True:
        sys.stdout.write(msg)
        sys.stdout.write(hint)
        answer = sys.stdin.readline().strip().lower()
        if answer and answer[0] in ('y', 'n'):
            return answer[0] == 'y'
        if default is not None:
            return default
def readline_masked(mask_char='*'):
    '''
    Read keystrokes from stdin until Enter is pressed, but don't display those
    keystrokes. Useful in password prompts.
    @param mask_char Char to display with each keystroke. If None, nothing is
        displayed and cursor doesn't move.
    '''
    buf = []
    while True:
        ch = getch.getch()
        if ch in ('\n', '\r', '\x1B'):
            # Enter (or Esc) terminates input; echo the newline.
            print('')
            break
        if ch == '\x08':
            # Backspace: drop the last char and erase its mask on screen.
            if buf:
                buf.pop()
                sys.stdout.write('\b \b')
        else:
            buf.append(ch)
            if mask_char is not None:
                sys.stdout.write(mask_char)
    return ''.join(buf)
|
<?php
/**
* @package Joomla.Libraries
* @subpackage Form
*
* @copyright Copyright (C) 2005 - 2013 Open Source Matters, Inc. All rights reserved.
* @license GNU General Public License version 2 or later; see LICENSE.txt
*/
defined('JPATH_BASE') or die;
JFormHelper::loadFieldClass('list');
/**
* Form Field class for the Joomla Framework.
*
* @package Joomla.Libraries
* @subpackage Form
* @since 3.1
*/
class JFormFieldTag extends JFormFieldList
{
	/**
	 * A flexible tag list that respects access controls
	 *
	 * @var    string
	 * @since  3.1
	 */
	public $type = 'Tag';

	/**
	 * Flag to work with nested tag field
	 *
	 * @var    boolean
	 * @since  3.1
	 */
	public $isNested = null;

	/**
	 * com_tags parameters
	 *
	 * @var    JRegistry
	 * @since  3.1
	 */
	protected $comParams = null;

	/**
	 * Constructor
	 *
	 * @since  3.1
	 */
	public function __construct()
	{
		parent::__construct();

		// Load com_tags config
		$this->comParams = JComponentHelper::getParams('com_tags');
	}

	/**
	 * Method to get the field input for a tag field.
	 *
	 * @return  string  The field input.
	 *
	 * @since   3.1
	 */
	protected function getInput()
	{
		// AJAX mode requires ajax-chosen
		if (!$this->isNested())
		{
			// Get the field id
			$id    = isset($this->element['id']) ? $this->element['id'] : null;
			$cssId = '#' . $this->getId($id, $this->element['name']);

			// Load the ajax-chosen customised field
			JHtml::_('tag.ajaxfield', $cssId, $this->allowCustom());
		}

		if (!is_array($this->value) && !empty($this->value))
		{
			// A JHelperTags value carries its tag ids in the ->tags property.
			if ($this->value instanceof JHelperTags)
			{
				if (empty($this->value->tags))
				{
					$this->value = array();
				}
				else
				{
					$this->value = $this->value->tags;
				}
			}

			// String in format 2,5,4
			if (is_string($this->value))
			{
				$this->value = explode(',', $this->value);
			}
		}

		$input = parent::getInput();

		return $input;
	}

	/**
	 * Method to get a list of tags
	 *
	 * @return  array  The field option objects.
	 *
	 * @since   3.1
	 */
	protected function getOptions()
	{
		$published = $this->element['published'] ? $this->element['published'] : array(0, 1);

		$db    = JFactory::getDbo();
		$query = $db->getQuery(true)
			->select('a.id AS value, a.path, a.title AS text, a.level, a.published')
			->from('#__tags AS a')
			->join('LEFT', $db->quoteName('#__tags') . ' AS b ON a.lft > b.lft AND a.rgt < b.rgt');

		// Ajax tag only loads assigned values
		if (!$this->isNested() && !empty($this->value))
		{
			// Only item assigned values
			$values = (array) $this->value;
			JArrayHelper::toInteger($values);
			$query->where('a.id IN (' . implode(',', $values) . ')');
		}

		// Block the possibility to set a tag as it own parent.
		// Bug fix: the form name is a string like 'com_tags.tag'; the previous
		// (int) cast turned it into 0, which loosely compares equal to any
		// non-numeric string in PHP, so the exclusion fired on every form.
		$id   = (int) $this->form->getValue('id', 0);
		$name = $this->form->getValue('name', '');

		if ($name == 'com_tags.tag')
		{
			$query->where('a.id != ' . $db->quote($id));
		}

		// Filter language
		if (!empty($this->element['language']))
		{
			$query->where('a.language = ' . $db->quote($this->element['language']));
		}

		$query->where($db->quoteName('a.alias') . ' <> ' . $db->quote('root'));

		// Filter on the published state
		if (is_numeric($published))
		{
			$query->where('a.published = ' . (int) $published);
		}
		elseif (is_array($published))
		{
			JArrayHelper::toInteger($published);
			$query->where('a.published IN (' . implode(',', $published) . ')');
		}

		$query->group('a.id, a.title, a.level, a.lft, a.rgt, a.parent_id, a.published, a.path')
			->order('a.lft ASC');

		// Get the options.
		$db->setQuery($query);

		try
		{
			$options = $db->loadObjectList();
		}
		catch (RuntimeException $e)
		{
			return false;
		}

		// Merge any additional options in the XML definition.
		$options = array_merge(parent::getOptions(), $options);

		// Prepare nested data
		if ($this->isNested())
		{
			$this->prepareOptionsNested($options);
		}
		else
		{
			$options = JHelperTags::convertPathsToNames($options);
		}

		return $options;
	}

	/**
	 * Add "-" before nested tags, depending on level
	 *
	 * @param   array  &$options  Array of tags
	 *
	 * @return  array  The field option objects.
	 *
	 * @since   3.1
	 */
	protected function prepareOptionsNested(&$options)
	{
		if ($options)
		{
			foreach ($options as &$option)
			{
				$repeat = (isset($option->level) && $option->level - 1 >= 0) ? $option->level - 1 : 0;
				$option->text = str_repeat('- ', $repeat) . $option->text;
			}
		}

		return $options;
	}

	/**
	 * Determine if the field has to be tagnested
	 *
	 * @return  boolean
	 *
	 * @since   3.1
	 */
	public function isNested()
	{
		if (is_null($this->isNested))
		{
			// If mode="nested" || ( mode not set & config = nested )
			if ((isset($this->element['mode']) && $this->element['mode'] == 'nested')
				|| (!isset($this->element['mode']) && $this->comParams->get('tag_field_ajax_mode', 1) == 0))
			{
				$this->isNested = true;
			}
			else
			{
				// Cache the negative result so later calls skip re-evaluation.
				$this->isNested = false;
			}
		}

		return $this->isNested;
	}

	/**
	 * Determines if the field allows or denies custom values
	 *
	 * @return  boolean
	 */
	public function allowCustom()
	{
		if (isset($this->element['custom']) && $this->element['custom'] == 'deny')
		{
			return false;
		}

		return true;
	}
}
|
import { Record } from 'immutable';

// Immutable record holding the UI state; 'ciseaux' (scissors) is the
// symbol selected by default.
const InitialState = Record({
  selectedSymbol: 'ciseaux',
});

const initialState = new InitialState();

// Reducer: replaces the selected symbol on SELECT_SYMBOL actions.
export default (state = initialState, action) => {
  if (action.type === 'SELECT_SYMBOL') {
    return state.set('selectedSymbol', action.payload);
  }
  return state;
};
|
/* arch/arm/mach-msm/include/mach/hardware.h
*
* Copyright (C) 2007 Google, Inc.
*
* This software is licensed under the terms of the GNU General Public
* License version 2, as published by the Free Software Foundation, and
* may be copied, distributed, and modified under those terms.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*/
|
from __future__ import with_statement
import unittest
if not hasattr(unittest.TestCase, 'assertIs'):
import unittest2 as unittest
from potpy import template
class TestTemplate(unittest.TestCase):
    """Unit tests for potpy.template.Template.

    Each test checks the compiled regex pattern and/or the fill template
    produced from a URL-style template string.
    """

    def test_empty_string(self):
        t = template.Template('')
        self.assertEqual(t.regex.pattern, '$')
        self.assertEqual(t.fill_template, '')

    def test_basic_string(self):
        t = template.Template('abc')
        self.assertEqual(t.regex.pattern, 'abc$')
        self.assertEqual(t.fill_template, 'abc')

    def test_slash(self):
        t = template.Template('abc/def')
        self.assertEqual(t.regex.pattern, 'abc/def$')
        self.assertEqual(t.fill_template, 'abc/def')

    def test_slashes(self):
        t = template.Template('abc/def/ghi')
        self.assertEqual(t.regex.pattern, 'abc/def/ghi$')
        self.assertEqual(t.fill_template, 'abc/def/ghi')

    def test_brackets(self):
        t = template.Template('abc/{def}/ghi')
        self.assertEqual(t.regex.pattern, 'abc/(?P<def>.*)/ghi$')
        self.assertEqual(t.fill_template, 'abc/%(def)s/ghi')

    def test_brackets_and_regex(self):
        t = template.Template(r'{def:\d+}')
        self.assertEqual(t.regex.pattern, r'(?P<def>\d+)$')
        self.assertEqual(t.fill_template, r'%(def)s')

    def test_brackets_in_regex(self):
        t = template.Template(r'{foo:\d{3}}')
        self.assertEqual(t.regex.pattern, r'(?P<foo>\d{3})$')
        self.assertEqual(t.fill_template, r'%(foo)s')

    def test_left_heavy_brackets(self):
        with self.assertRaises(ValueError) as assertion:
            # Fixed: a stray trailing comma here wrapped the call in a
            # pointless one-element tuple expression.
            template.Template(r'{def:\d{3}')
        self.assertEqual(
            str(assertion.exception),
            'unbalanced brackets'
        )

    def test_right_heavy_brackets(self):
        t = template.Template(r'{def:\d+}}')
        self.assertEqual(t.regex.pattern, r'(?P<def>\d+)}$')
        self.assertEqual(t.regex.match('1}').groupdict()['def'], '1')
        self.assertEqual(t.fill_template, r'%(def)s}')

    def test_backslash(self):
        t = template.Template(r'a\c')
        self.assertEqual(t.regex.pattern, r'a\\c$')
        self.assertEqual(t.fill_template, r'a\\c')

    def test_multiple_brackets(self):
        t = template.Template('{abc}/{def:\d{3}}{ghi}jkl')
        self.assertEqual(
            t.regex.pattern,
            '(?P<abc>.*)/(?P<def>\d{3})(?P<ghi>.*)jkl$'
        )
        self.assertEqual(
            t.fill_template,
            '%(abc)s/%(def)s%(ghi)sjkl'
        )

    def test_literal_brackets(self):
        t = template.Template('foo{{bar}')
        self.assertEqual(t.regex.pattern, r'foo\{bar}$')
        self.assertIsNot(t.regex.match('foo{bar}'), None)
        self.assertEqual(t.fill_template, 'foo{bar}')

    def test_two_consecutive_literal_brackets(self):
        t = template.Template('foo{{{{bar}')
        self.assertEqual(t.regex.pattern, r'foo\{\{bar}$')
        self.assertIsNot(t.regex.match('foo{{bar}'), None)
        self.assertEqual(t.fill_template, 'foo{{bar}')

    def test_literal_bracket_at_end_of_string(self):
        t = template.Template('foo{{')
        self.assertEqual(t.regex.pattern, r'foo\{$')
        self.assertIsNot(t.regex.match('foo{'), None)
        self.assertEqual(t.fill_template, 'foo{')

    def test_literal_bracket_before_bracket(self):
        t = template.Template('foo{{{bar}')
        self.assertEqual(t.regex.pattern, r'foo\{(?P<bar>.*)$')
        self.assertEqual(
            t.regex.match('foo{baz').groupdict()['bar'],
            'baz'
        )
        self.assertEqual(t.fill_template, 'foo{%(bar)s')

    def test_unbalanced_bracket_at_end_of_string(self):
        with self.assertRaises(ValueError) as assertion:
            template.Template('foo{')
        self.assertEqual(
            str(assertion.exception),
            'unbalanced brackets'
        )

    def test_percent(self):
        t = template.Template('foo%bar')
        self.assertEqual(t.fill_template, 'foo%%bar')

    def test_percent_in_brackets(self):
        t = template.Template('{foo:bar%bar}')
        self.assertEqual(t.fill_template, '%(foo)s')

    def test_fill(self):
        t = template.Template('{foo}, {bar}')
        self.assertEqual(t.fill(foo='baz', bar='qux'), 'baz, qux')

    def test_type_conversion(self):
        t = template.Template('{foo:\d+}', foo=int)
        self.assertEqual(t.match('42')['foo'], 42)


if __name__ == '__main__':
    unittest.main()
|
// Sugar activity: a two-pane (pagedown) markdown editor whose contents are
// persisted in the Sugar datastore and can pull text from the Journal.
define(function (require) {
    var activity = require("sugar-web/activity/activity");
    var datastore = require("sugar-web/datastore");

    // Manipulate the DOM only when it is ready.
    require(['domReady!'], function (doc) {

        // Initialize the activity.
        activity.setup();

        // NOTE(review): assigned without var, so this leaks as a global.
        inputTextContent = document.getElementById("wmd-input-second");
        inputTextContent.value = "#This is a sample input";

        //to save and resume the contents from datastore.
        var datastoreObject = activity.getDatastoreObject();

        // Persist the editor contents whenever the input loses focus.
        inputTextContent.onblur = function () {
            var jsonData = JSON.stringify((inputTextContent.value).toString());
            datastoreObject.setDataAsText(jsonData);
            datastoreObject.save(function () {});
        };

        markdownParsing(); //to load for the first time

        // Restore previously saved text, then re-render the preview.
        datastoreObject.loadAsText(function (error, metadata, data) {
            markdowntext = JSON.parse(data);
            inputTextContent.value = markdowntext;
            markdownParsing(); //to load again when there is a datastore entry
        });

        // "Insert from Journal": let the user pick a datastore object and
        // splice its text in at the current cursor position.
        var journal = document.getElementById("insertText");
        journal.onclick = function () {
            activity.showObjectChooser(function (error, result) {
                //result1 = result.toString();
                var datastoreObject2 = new datastore.DatastoreObject(result);
                datastoreObject2.loadAsText(function (error, metadata, data) {
                    // Journal entries may hold raw text or JSON-encoded text.
                    try {
                        textdata = JSON.parse(data);
                    } catch (e) {
                        textdata = data;
                    }
                    var inputTextContent = document.getElementById("wmd-input-second");
                    //inputTextContent.value += textdata;
                    insertAtCursor(inputTextContent, textdata);
                });
            });
        };

        // Insert myValue into myField at the caret (append as a fallback).
        function insertAtCursor(myField, myValue) {
            //IE support
            if (document.selection) {
                myField.focus();
                sel = document.selection.createRange();
                sel.text = myValue;
            }
            //MOZILLA and others
            else if (myField.selectionStart || myField.selectionStart == '0') {
                var startPos = myField.selectionStart;
                var endPos = myField.selectionEnd;
                myField.value = myField.value.substring(0, startPos)
                    + myValue
                    + myField.value.substring(endPos, myField.value.length);
            } else {
                myField.value += myValue;
            }
            //markdownParsing();
        }

        // Build the pagedown editor/preview pair for the "-second" pane.
        function markdownParsing() {
            var converter2 = new Markdown.Converter();
            var help = function () {
                alert("Do you need help?");
            }
            var options = {
                helpButton: {
                    handler: help
                },
                strings: {
                    quoteexample: "whatever you're quoting, put it right here"
                }
            };
            var editor2 = new Markdown.Editor(converter2, "-second", options);
            editor2.run();
        }
    });
});
|
/**
* @file gpio.c
* @brief
*
* DAPLink Interface Firmware
* Copyright (c) 2009-2016, ARM Limited, All Rights Reserved
* Copyright (c) 2016-2017 NXP
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "fsl_device_registers.h"
#include "DAP_config.h"
#include "gpio.h"
#include "daplink.h"
#include "hic_init.h"
#include "fsl_clock.h"
// Spin for approximately `cycles` loop iterations. The counter is volatile
// so the compiler cannot optimize the delay loop away.
static void busy_wait(uint32_t cycles)
{
    volatile uint32_t remaining = cycles;

    while (remaining > 0u) {
        remaining--;
    }
}
// One-time board GPIO/clock setup: status LED, reset lines, SWO RX,
// power-control pins and pull-downs. Call before any other gpio_* API.
void gpio_init(void)
{
    // Enable hardfault on unaligned access for the interface only.
    // If this is done in the bootloader then it might (will) break
    // older application firmware or firmware from 3rd party vendors.
#if defined(DAPLINK_IF)
    SCB->CCR |= SCB_CCR_UNALIGN_TRP_Msk;
#endif
    // enable clock to ports
    SIM->SCGC5 |= SIM_SCGC5_PORTA_MASK | SIM_SCGC5_PORTB_MASK | SIM_SCGC5_PORTC_MASK | SIM_SCGC5_PORTD_MASK | SIM_SCGC5_PORTE_MASK;
    SIM->SCGC6 |= SIM_SCGC6_DMAMUX_MASK;
    // configure pin as GPIO
    LED_CONNECTED_PORT->PCR[LED_CONNECTED_BIT] = PORT_PCR_MUX(1);
    // led off - enable output
    LED_CONNECTED_GPIO->PDOR = 1UL << LED_CONNECTED_BIT;
    LED_CONNECTED_GPIO->PDDR = 1UL << LED_CONNECTED_BIT;
    // led on
    LED_CONNECTED_GPIO->PCOR = 1UL << LED_CONNECTED_BIT;
    // reset button configured as gpio input
    PIN_nRESET_GPIO->PDDR &= ~PIN_nRESET;
    PIN_nRESET_PORT->PCR[PIN_nRESET_BIT] = PORT_PCR_MUX(1);
    /* Enable LVLRST_EN */
    PIN_nRESET_EN_PORT->PCR[PIN_nRESET_EN_BIT] = PORT_PCR_MUX(1) | /* GPIO */
                                                 PORT_PCR_ODE_MASK; /* Open-drain */
    PIN_nRESET_EN_GPIO->PSOR = PIN_nRESET_EN;
    PIN_nRESET_EN_GPIO->PDDR |= PIN_nRESET_EN;
    // Configure SWO UART RX.
    PIN_SWO_RX_PORT->PCR[PIN_SWO_RX_BIT] = PORT_PCR_MUX(3); // UART1
    PIN_SWO_RX_GPIO->PDDR &= ~(1 << PIN_SWO_RX_BIT); // Input
    // Enable pulldowns on power monitor control signals to reduce power consumption.
    PIN_CTRL0_PORT->PCR[PIN_CTRL0_BIT] = PORT_PCR_MUX(1) | PORT_PCR_PE_MASK | PORT_PCR_PS(0);
    PIN_CTRL1_PORT->PCR[PIN_CTRL1_BIT] = PORT_PCR_MUX(1) | PORT_PCR_PE_MASK | PORT_PCR_PS(0);
    PIN_CTRL2_PORT->PCR[PIN_CTRL2_BIT] = PORT_PCR_MUX(1) | PORT_PCR_PE_MASK | PORT_PCR_PS(0);
    PIN_CTRL3_PORT->PCR[PIN_CTRL3_BIT] = PORT_PCR_MUX(1) | PORT_PCR_PE_MASK | PORT_PCR_PS(0);
    // Enable pulldown on GPIO0_B to prevent it floating.
    PIN_GPIO0_B_PORT->PCR[PIN_GPIO0_B_BIT] = PORT_PCR_MUX(1) | PORT_PCR_PE_MASK | PORT_PCR_PS(0);
    // configure power enable pin as GPIO
    PIN_POWER_EN_PORT->PCR[PIN_POWER_EN_BIT] = PORT_PCR_MUX(1);
    // set output to 0
    PIN_POWER_EN_GPIO->PCOR = PIN_POWER_EN;
    // switch gpio to output
    PIN_POWER_EN_GPIO->PDDR |= PIN_POWER_EN;
    // Let the voltage rails stabilize. This is especially important
    // during software resets, since the target's 3.3v rail can take
    // 20-50ms to drain. During this time the target could be driving
    // the reset pin low, causing the bootloader to think the reset
    // button is pressed.
    // Note: With optimization set to -O2 the value 1000000 delays for ~85ms
    busy_wait(1000000);
}
// Drive the target power switch: set (PSOR) to enable, clear (PCOR) to disable.
void gpio_set_board_power(bool powerEnabled)
{
    if (!powerEnabled) {
        // disable power switch
        PIN_POWER_EN_GPIO->PCOR = PIN_POWER_EN;
    } else {
        // enable power switch
        PIN_POWER_EN_GPIO->PSOR = PIN_POWER_EN;
    }
}
// Clock frequency feeding UART1 (used by the SWO RX channel).
uint32_t UART1_GetFreq(void)
{
    return CLOCK_GetCoreSysClkFreq();
}
// UART1 pin mux hook required by the fsl UART driver; intentionally empty.
void UART1_InitPins(void)
{
    // RX pin inited in gpio_init();
    // TX not used.
}
// UART1 pin de-mux hook required by the fsl UART driver; intentionally empty.
void UART1_DeinitPins(void)
{
    // No need to deinit the RX pin.
    // TX not used.
}
// Drive the shared status LED. The LED is active-low: clearing the pin
// (PCOR) turns it on, setting it (PSOR) turns it off.
void gpio_set_hid_led(gpio_led_state_t state)
{
    if (state != 0) {
        LED_CONNECTED_GPIO->PCOR = LED_CONNECTED; // LED on
    } else {
        LED_CONNECTED_GPIO->PSOR = LED_CONNECTED; // LED off
    }
}
// The board has a single status LED; CDC activity shares it with HID.
void gpio_set_cdc_led(gpio_led_state_t state)
{
    gpio_set_hid_led(state);
}
// The board has a single status LED; MSC activity shares it with HID.
void gpio_set_msc_led(gpio_led_state_t state)
{
    gpio_set_hid_led(state);
}
// Sample the reset button. The line is active-low, so a 0 read means pressed.
uint8_t gpio_get_reset_btn_no_fwrd(void)
{
    if (PIN_nRESET_GPIO->PDIR & PIN_nRESET) {
        return 0; // released
    }

    return 1; // pressed
}
// No forwarded reset button on this HIC; always report "not pressed".
uint8_t gpio_get_reset_btn_fwrd(void)
{
    return 0;
}
|
package com.ambimmort.nisp3.controller.f.domain.view;
import com.ambimmort.nisp3.model.ui.f.area.DomainBean;
import com.ambimmort.nisp3.service.def.IAreaManagementService;
import com.ambimmort.nisp3.service.impl.AreaManagementServiceImpl;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletRequest;
import java.util.ArrayList;
/**
* Created by pc on 2015/6/10.
*/
@Controller("/f/um/domain/list.view.do")
public class List {
private Log logger = LogFactory.getLog(List.class);
private ModelAndView error = new ModelAndView("pub/error");
private ModelAndView mv = new ModelAndView("f/um/domain/list");
private String redirectURL = "/f/um/domain/list.view.do";
@Autowired
private IAreaManagementService areaMangementService;
@RequestMapping("/f/um/domain/list.view.do")
public ModelAndView action(HttpServletRequest request){
//取得当前登录用户username
//判断当前登录用户是否为 "admin",只有admin能执行区域操作!
String uname = request.getUserPrincipal().getName();
if (!uname.equals("admin")){
error.addObject("message","您无权限进行此操作");
error.addObject("redirectURL",redirectURL);
return error;
}
try {
//获取当前用户所管辖的所有区域列表
java.util.List<DomainBean> areas = areaMangementService.listAreasByAreaId("-1");
//将区域列表转成JSONArray
java.util.List<JSONObject> nl = new ArrayList<>();
for (DomainBean db:areas){
nl.add(JSONObject.fromObject(db));
}
mv.addObject("uiareas",JSONArray.fromObject(nl));
return mv;
} catch (Exception e) {
e.printStackTrace();
error.addObject("message",e.getMessage());
error.addObject("redirectURL", redirectURL);
return error;
}
}
}
|
'''OpenGL extension NV.shader_buffer_store
This module customises the behaviour of the
OpenGL.raw.GL.NV.shader_buffer_store to provide a more
Python-friendly API
Overview (from the spec)
This extension builds upon the mechanisms added by the
NV_shader_buffer_load extension to allow shaders to perform random-access
reads to buffer object memory without using dedicated buffer object
binding points. Instead, it allowed an application to make a buffer
object resident, query a GPU address (pointer) for the buffer object, and
then use that address as a pointer in shader code. This approach allows
shaders to access a large number of buffer objects without needing to
repeatedly bind buffers to a limited number of fixed-functionality binding
points.
This extension lifts the restriction from NV_shader_buffer_load that
disallows writes. In particular, the MakeBufferResidentNV function now
allows READ_WRITE and WRITE_ONLY access modes, and the shading language is
extended to allow shaders to write through (GPU address) pointers.
Additionally, the extension provides built-in functions to perform atomic
memory transactions to buffer object memory.
As with the shader writes provided by the EXT_shader_image_load_store
extension, writes to buffer object memory using this extension are weakly
ordered to allow for parallel or distributed shader execution. The
EXT_shader_image_load_store extension provides mechanisms allowing for
finer control of memory transaction order, and those mechanisms apply
equally to buffer object stores using this extension.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/NV/shader_buffer_store.txt
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions, wrapper
from OpenGL.GL import glget
import ctypes
from OpenGL.raw.GL.NV.shader_buffer_store import *
### END AUTOGENERATED SECTION
|
import json

from django.contrib.auth.decorators import login_required
from django.contrib.gis.geos import Point
from django.core.context_processors import csrf
from django.http import Http404, HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response

from fruitlocations.forms import AddFruit
from fruitlocations.models import FruitLocations
# @login_required
def find_fruit(request):
    """Render the map page used to browse fruit locations."""
    template_name = "fruitlocations/find_fruit.html"
    return render_to_response(template_name)
# Take all data from FruitLocations model table and turn it into a json object to send to map via JS/AJAX (see find_fruit.js)
def ajax_get_fruit(request):
    """Return every FruitLocations row as JSON keyed by primary key.

    Answers only AJAX GET requests; anything else raises Http404
    (previously the literal string 'Http404' was returned, which Django
    cannot serve as a response).
    """
    if request.is_ajax() and request.method == "GET":
        fruit_markers = {}
        # Renamed from `object`, which shadowed the builtin.
        for fruit in FruitLocations.objects.all():
            # NOTE(review): geom is a GeoDjango geometry; json.dumps assumes
            # it is serializable as stored -- confirm against the model field.
            fruit_markers[fruit.id] = {
                "fruit_variety": fruit.fruit_variety,
                "geom": fruit.geom,
            }
        return HttpResponse(json.dumps(fruit_markers), content_type="application/json")
    raise Http404("ajax_get_fruit only accepts AJAX GET requests")
# Create add fruit form that takes spatial data from a marker on a map garnered through JS/AJAX (see add_fruit.js) and save to DB
def add_fruit(request):
    """Handle the add-fruit form.

    POST: validate the form, build a Point from the "x,y" coordinate
    string supplied by the map marker (see add_fruit.js) and save a new
    FruitLocations row. GET: render an empty form with a CSRF token.
    """
    if request.method == 'POST':
        form = AddFruit(request.POST)
        if form.is_valid():
            cd = form.cleaned_data
            coordinates = cd['coordinates'].split(',')
            # NOTE(review): assumes the first component is the X value
            # expected by Point -- confirm the order against add_fruit.js.
            new_point = FruitLocations()
            new_point.geom = Point(float(coordinates[0]), float(coordinates[1]))
            new_point.fruit_variety = cd['fruit_variety']
            new_point.save()
            return render_to_response('fruitlocations/AddFruit_success.html')
        return render_to_response('fruitlocations/AddFruit_fail.html')
    # GET: one unbound form instance (previously a second AddFruit() was
    # created and the first discarded).
    token = {}
    token.update(csrf(request))
    token['form'] = AddFruit()
    return render_to_response('fruitlocations/add_fruit.html', token)
|
import pytest
import random
@pytest.mark.bdb
@pytest.mark.genesis
def test_stepping_changefeed_produces_update(b, steps):
    """A stale, reassigned transaction must surface twice on the changefeed."""
    tx = input_single_create(b)
    # timeouts are 0 so will reassign immediately
    steps.stale_check_transactions()
    steps.stale_reassign_transactions()
    # We expect 2 changefeed events
    steps.block_changefeed()
    steps.block_changefeed()
    assert steps.counts == {'block_filter_tx': 2}
    assert ([tx['id'] for (tx,) in steps.queues['block_filter_tx']] ==
            [tx.id, tx.id])
@pytest.mark.bdb
@pytest.mark.genesis
def test_dupe_tx_in_block(b, steps):
    """A transaction seen twice by the pipeline appears only once in the block."""
    tx = input_single_create(b)
    # Feed the same transaction through the changefeed/filter stages twice.
    for i in range(2):
        steps.stale_check_transactions()
        steps.stale_reassign_transactions()
        steps.block_changefeed()
        steps.block_filter_tx()
    steps.block_validate_tx()
    steps.block_validate_tx()
    assert steps.counts == {'block_create': 2}
    # First create call merges without timing out; the timed-out call
    # produces the block, which must contain the transaction only once.
    steps.block_create(timeout=False)
    block = steps.block_create(timeout=True)
    assert block.transactions == [tx]
def input_single_create(b):
    """Create, sign and write a single CREATE transaction; return it."""
    from bigchaindb.common.transaction import Transaction
    # Random metadata so each generated transaction gets a unique id.
    metadata = {'r': random.random()}
    tx = Transaction.create([b.me], [([b.me], 1)], metadata).sign([b.me_private])
    b.write_transaction(tx)
    return tx
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Maui
{
    /// <summary>
    /// Extension helpers for <see cref="IEnumerable{T}"/>.
    /// </summary>
    public static class IEnumerableExtension
    {
        /// <summary>
        /// Returns true when the sequence contains no elements.
        /// </summary>
        public static bool Empty<T>( this IEnumerable<T> set )
        {
            return set.Any() == false;
        }
    }
}
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.list = exports.nodes = void 0;
var t = _interopRequireWildcard(require("@babel/types"));
// Babel-generated helper: lazily create (and memoize) the WeakMap that
// caches interop-wrapped modules.
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
// Babel-generated helper: wrap a CommonJS module object so `import * as`
// namespace semantics work, reusing the cache above.
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
/**
 * Walk an expression tree and record whether it contains a call
 * expression, a function, or a reference to a babel helper.
 * Mutates and returns `state` ({hasCall, hasFunction, hasHelper}).
 */
function crawl(node, state = {}) {
  if (t.isMemberExpression(node)) {
    crawl(node.object, state);
    if (node.computed) crawl(node.property, state);
  } else if (t.isBinary(node) || t.isAssignmentExpression(node)) {
    crawl(node.left, state);
    crawl(node.right, state);
  } else if (t.isCallExpression(node)) {
    state.hasCall = true;
    crawl(node.callee, state);
  } else if (t.isFunction(node)) {
    state.hasFunction = true;
  } else if (t.isIdentifier(node)) {
    // Fix: an Identifier node has no `callee` property, so the original
    // `isHelper(node.callee)` always evaluated isHelper(undefined) and
    // `hasHelper` could never be set here. Test the identifier itself.
    state.hasHelper = state.hasHelper || isHelper(node);
  }
  return state;
}
/**
 * Heuristic: does this expression look like a babel/CommonJS helper
 * reference, i.e. `require` or an identifier starting with "_"?
 */
function isHelper(node) {
  if (t.isMemberExpression(node)) {
    return isHelper(node.object) || isHelper(node.property);
  }
  if (t.isIdentifier(node)) {
    return node.name === "require" || node.name[0] === "_";
  }
  if (t.isCallExpression(node)) {
    return isHelper(node.callee);
  }
  if (t.isBinary(node) || t.isAssignmentExpression(node)) {
    return (t.isIdentifier(node.left) && isHelper(node.left)) || isHelper(node.right);
  }
  return false;
}
// True for "plain value" nodes: literals, object/array expressions,
// identifiers and member expressions.
function isType(node) {
  return (
    t.isLiteral(node) ||
    t.isObjectExpression(node) ||
    t.isArrayExpression(node) ||
    t.isIdentifier(node) ||
    t.isMemberExpression(node)
  );
}
/**
 * Per-node-type whitespace rules: each handler may return
 * {before, after} booleans requesting a blank line before/after
 * the node when printing.
 */
const nodes = {
  // Surround helper-flavoured or function-carrying assignments.
  AssignmentExpression(node) {
    const state = crawl(node.right);
    if (state.hasCall && state.hasHelper || state.hasFunction) {
      return {
        before: state.hasFunction,
        after: true
      };
    }
  },
  // Blank line before a non-empty case (or the first case), and after
  // an empty trailing case.
  SwitchCase(node, parent) {
    return {
      before: node.consequent.length || parent.cases[0] === node,
      after: !node.consequent.length && parent.cases[parent.cases.length - 1] === node
    };
  },
  LogicalExpression(node) {
    if (t.isFunction(node.left) || t.isFunction(node.right)) {
      return {
        after: true
      };
    }
  },
  // Keep a blank line after the "use strict" directive.
  Literal(node) {
    if (node.value === "use strict") {
      return {
        after: true
      };
    }
  },
  // IIFEs and helper calls get breathing room on both sides.
  CallExpression(node) {
    if (t.isFunction(node.callee) || isHelper(node)) {
      return {
        before: true,
        after: true
      };
    }
  },
  // Declarations that initialise helpers (or contain calls/functions).
  VariableDeclaration(node) {
    for (let i = 0; i < node.declarations.length; i++) {
      const declar = node.declarations[i];
      let enabled = isHelper(declar.id) && !isType(declar.init);
      if (!enabled) {
        const state = crawl(declar.init);
        enabled = isHelper(declar.init) && state.hasCall || state.hasFunction;
      }
      if (enabled) {
        return {
          before: true,
          after: true
        };
      }
    }
  },
  IfStatement(node) {
    if (t.isBlockStatement(node.consequent)) {
      return {
        before: true,
        after: true
      };
    }
  }
};
exports.nodes = nodes;

// The first member of an object (or object-type) literal gets a leading
// blank line; later members do not.
nodes.ObjectProperty = nodes.ObjectTypeProperty = nodes.ObjectMethod = function (node, parent) {
  if (parent.properties[0] === node) {
    return {
      before: true
    };
  }
};

// Same rule for flow object-type call properties, indexers and internal
// slots — but only when no earlier member group already claimed the line.
nodes.ObjectTypeCallProperty = function (node, parent) {
  if (parent.callProperties[0] === node && (!parent.properties || !parent.properties.length)) {
    return {
      before: true
    };
  }
};

nodes.ObjectTypeIndexer = function (node, parent) {
  if (parent.indexers[0] === node && (!parent.properties || !parent.properties.length) && (!parent.callProperties || !parent.callProperties.length)) {
    return {
      before: true
    };
  }
};

nodes.ObjectTypeInternalSlot = function (node, parent) {
  if (parent.internalSlots[0] === node && (!parent.properties || !parent.properties.length) && (!parent.callProperties || !parent.callProperties.length) && (!parent.indexers || !parent.indexers.length)) {
    return {
      before: true
    };
  }
};
/**
 * For each node type, the list of child nodes whose own whitespace
 * rules should also be consulted.
 */
const list = {
  VariableDeclaration(node) {
    const inits = [];
    for (const decl of node.declarations) {
      inits.push(decl.init);
    }
    return inits;
  },
  ArrayExpression(node) {
    return node.elements;
  },
  ObjectExpression(node) {
    return node.properties;
  }
};
exports.list = list;

// Statement-like constructs always get blank lines on both sides.
// `true` expands to {before: true, after: true}; the rule is copied to
// every alias of the type (e.g. "Loop" covers For/While/DoWhile...).
[["Function", true], ["Class", true], ["Loop", true], ["LabeledStatement", true], ["SwitchStatement", true], ["TryStatement", true]].forEach(function ([type, amounts]) {
  if (typeof amounts === "boolean") {
    amounts = {
      after: amounts,
      before: amounts
    };
  }
  [type].concat(t.FLIPPED_ALIAS_KEYS[type] || []).forEach(function (type) {
    nodes[type] = function () {
      return amounts;
    };
  });
});
|
/**
******************************************************************************
* @file spark_wiring_usbserial.h
* @author Satish Nair
* @version V1.0.0
* @date 13-March-2013
* @brief Header for spark_wiring_usbserial.c module
******************************************************************************
Copyright (c) 2013-2015 Particle Industries, Inc. All rights reserved.
Copyright (c) 2006 Nicholas Zambetti. All right reserved.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation, either
version 3 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, see <http://www.gnu.org/licenses/>.
******************************************************************************
*/
#ifndef __SPARK_WIRING_USBSERIAL_H
#define __SPARK_WIRING_USBSERIAL_H
#include "spark_wiring_stream.h"
#include "usb_hal.h"
// USB CDC virtual serial port exposed as a Wiring Stream.
class USBSerial : public Stream
{
public:
    // public methods
    USBSerial();

    // Baud rate negotiated by the USB host via the HAL; 0 when the host
    // has not opened the port yet.
    unsigned int baud() { return USB_USART_Baud_Rate(); }
    // Truthy once the host has opened/configured the port (baud != 0).
    operator bool() { return baud()!=0; }

    void begin(long speed);
    void end();
    int peek();

    void Initialize();
    void Process();

    // Stream interface.
    virtual size_t write(uint8_t byte);
    virtual int read();
    virtual int available();
    virtual void flush();

    // Pull in the Print::write overloads (strings, buffers, numbers).
    using Print::write;
};
extern USBSerial Serial;
#endif
|
"use strict";
var helpers = require("../../helpers/helpers");
// Generated moment-timezone test data for Asia/Harbin.
// Each entry is [UTC instant, expected local wall time, abbreviation,
// expected UTC offset in minutes (positive = behind UTC)].
exports["Asia/Harbin"] = {
  "guess:by:offset" : helpers.makeTestGuess("Asia/Harbin", { offset: true, expect: "Asia/Manila" }),
  "guess:by:abbr" : helpers.makeTestGuess("Asia/Harbin", { abbr: true, expect: "Asia/Shanghai" }),
  "1919" : helpers.makeTestYear("Asia/Harbin", [
    ["1919-04-12T15:59:59+00:00", "23:59:59", "CST", -480],
    ["1919-04-12T16:00:00+00:00", "01:00:00", "CDT", -540],
    ["1919-09-30T14:59:59+00:00", "23:59:59", "CDT", -540],
    ["1919-09-30T15:00:00+00:00", "23:00:00", "CST", -480]
  ]),
  "1940" : helpers.makeTestYear("Asia/Harbin", [
    ["1940-05-31T15:59:59+00:00", "23:59:59", "CST", -480],
    ["1940-05-31T16:00:00+00:00", "01:00:00", "CDT", -540],
    ["1940-10-12T14:59:59+00:00", "23:59:59", "CDT", -540],
    ["1940-10-12T15:00:00+00:00", "23:00:00", "CST", -480]
  ]),
  "1941" : helpers.makeTestYear("Asia/Harbin", [
    ["1941-03-14T15:59:59+00:00", "23:59:59", "CST", -480],
    ["1941-03-14T16:00:00+00:00", "01:00:00", "CDT", -540],
    ["1941-11-01T14:59:59+00:00", "23:59:59", "CDT", -540],
    ["1941-11-01T15:00:00+00:00", "23:00:00", "CST", -480]
  ]),
  "1942" : helpers.makeTestYear("Asia/Harbin", [
    ["1942-01-30T15:59:59+00:00", "23:59:59", "CST", -480],
    ["1942-01-30T16:00:00+00:00", "01:00:00", "CDT", -540]
  ]),
  "1945" : helpers.makeTestYear("Asia/Harbin", [
    ["1945-09-01T14:59:59+00:00", "23:59:59", "CDT", -540],
    ["1945-09-01T15:00:00+00:00", "23:00:00", "CST", -480]
  ]),
  "1946" : helpers.makeTestYear("Asia/Harbin", [
    ["1946-05-14T15:59:59+00:00", "23:59:59", "CST", -480],
    ["1946-05-14T16:00:00+00:00", "01:00:00", "CDT", -540],
    ["1946-09-30T14:59:59+00:00", "23:59:59", "CDT", -540],
    ["1946-09-30T15:00:00+00:00", "23:00:00", "CST", -480]
  ]),
  "1947" : helpers.makeTestYear("Asia/Harbin", [
    ["1947-04-14T15:59:59+00:00", "23:59:59", "CST", -480],
    ["1947-04-14T16:00:00+00:00", "01:00:00", "CDT", -540],
    ["1947-10-31T14:59:59+00:00", "23:59:59", "CDT", -540],
    ["1947-10-31T15:00:00+00:00", "23:00:00", "CST", -480]
  ]),
  "1948" : helpers.makeTestYear("Asia/Harbin", [
    ["1948-04-30T15:59:59+00:00", "23:59:59", "CST", -480],
    ["1948-04-30T16:00:00+00:00", "01:00:00", "CDT", -540],
    ["1948-09-30T14:59:59+00:00", "23:59:59", "CDT", -540],
    ["1948-09-30T15:00:00+00:00", "23:00:00", "CST", -480]
  ]),
  "1949" : helpers.makeTestYear("Asia/Harbin", [
    ["1949-04-30T15:59:59+00:00", "23:59:59", "CST", -480],
    ["1949-04-30T16:00:00+00:00", "01:00:00", "CDT", -540],
    ["1949-05-27T14:59:59+00:00", "23:59:59", "CDT", -540],
    ["1949-05-27T15:00:00+00:00", "23:00:00", "CST", -480]
  ]),
  "1986" : helpers.makeTestYear("Asia/Harbin", [
    ["1986-05-03T17:59:59+00:00", "01:59:59", "CST", -480],
    ["1986-05-03T18:00:00+00:00", "03:00:00", "CDT", -540],
    ["1986-09-13T16:59:59+00:00", "01:59:59", "CDT", -540],
    ["1986-09-13T17:00:00+00:00", "01:00:00", "CST", -480]
  ]),
  "1987" : helpers.makeTestYear("Asia/Harbin", [
    ["1987-04-11T17:59:59+00:00", "01:59:59", "CST", -480],
    ["1987-04-11T18:00:00+00:00", "03:00:00", "CDT", -540],
    ["1987-09-12T16:59:59+00:00", "01:59:59", "CDT", -540],
    ["1987-09-12T17:00:00+00:00", "01:00:00", "CST", -480]
  ]),
  "1988" : helpers.makeTestYear("Asia/Harbin", [
    ["1988-04-16T17:59:59+00:00", "01:59:59", "CST", -480],
    ["1988-04-16T18:00:00+00:00", "03:00:00", "CDT", -540],
    ["1988-09-10T16:59:59+00:00", "01:59:59", "CDT", -540],
    ["1988-09-10T17:00:00+00:00", "01:00:00", "CST", -480]
  ]),
  "1989" : helpers.makeTestYear("Asia/Harbin", [
    ["1989-04-15T17:59:59+00:00", "01:59:59", "CST", -480],
    ["1989-04-15T18:00:00+00:00", "03:00:00", "CDT", -540],
    ["1989-09-16T16:59:59+00:00", "01:59:59", "CDT", -540],
    ["1989-09-16T17:00:00+00:00", "01:00:00", "CST", -480]
  ]),
  "1990" : helpers.makeTestYear("Asia/Harbin", [
    ["1990-04-14T17:59:59+00:00", "01:59:59", "CST", -480],
    ["1990-04-14T18:00:00+00:00", "03:00:00", "CDT", -540],
    ["1990-09-15T16:59:59+00:00", "01:59:59", "CDT", -540],
    ["1990-09-15T17:00:00+00:00", "01:00:00", "CST", -480]
  ]),
  "1991" : helpers.makeTestYear("Asia/Harbin", [
    ["1991-04-13T17:59:59+00:00", "01:59:59", "CST", -480],
    ["1991-04-13T18:00:00+00:00", "03:00:00", "CDT", -540],
    ["1991-09-14T16:59:59+00:00", "01:59:59", "CDT", -540],
    ["1991-09-14T17:00:00+00:00", "01:00:00", "CST", -480]
  ])
};
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
sys.path.insert(0, 'lib')
import os
import math as m
import urllib2
import json
import MySQLdb
import time
import random
def dbLocs():
    """Fetch all active outbreak locations from the Cloud SQL database.

    Returns:
        (keys, outbreaks): the location names, and a dict mapping each
        name to {'coords': (latitude, longitude), 'type': admin level}.

    NOTE(review): `db` is only assigned inside the App Engine branch; when
    run outside GAE, `with db:` raises NameError. Presumably this module is
    GAE-only — confirm, or add a local-connection fallback.
    """
    env = os.getenv('SERVER_SOFTWARE')
    if (env and env.startswith('Google App Engine/')):
        db = MySQLdb.connect(unix_socket='/cloudsql/smsante-query:smsantedb', user='root', db='sms_data')
    sql = "SELECT location,latitude,longitude,type FROM outbreaks WHERE presence='Y';"
    with db:
        cur = db.cursor()
        cur.execute(sql)
        data = cur.fetchall()
    outbreaks = {}
    for loc in data:
        outbreaks[loc[0]] = {'coords': (loc[1], loc[2]), 'type': loc[3]}
    obKeys = outbreaks.keys()
    return obKeys, outbreaks
def geoDate():
    """Return the most recent report_time among active outbreaks.

    NOTE(review): same GAE-only `db` binding caveat as dbLocs(); also raises
    ValueError from max() if there are no active outbreaks — confirm callers
    guarantee at least one row.
    """
    env = os.getenv('SERVER_SOFTWARE')
    if (env and env.startswith('Google App Engine/')):
        db = MySQLdb.connect(unix_socket='/cloudsql/smsante-query:smsantedb', user='root', db='sms_data')
    sql = "SELECT report_time FROM outbreaks WHERE presence='Y';"
    with db:
        cur = db.cursor()
        cur.execute(sql)
        data = cur.fetchall()
    dates = []
    for loc in data:
        dates.append(loc[0])
    lastUpdate = max(dates)
    return lastUpdate
def withinQuota():
    """Return True while today's geocode query count is below the 2500/day quota."""
    env = os.getenv('SERVER_SOFTWARE')
    if (env and env.startswith('Google App Engine/')):
        db = MySQLdb.connect(unix_socket='/cloudsql/smsante-query:smsantedb', user='root', db='sms_input')
    sql = "SELECT query_time FROM info_query WHERE query_time >= CURDATE();"
    with db:
        cur = db.cursor()
        cur.execute(sql)
        qtimes = len(cur.fetchall())
    # Daily Google geocoding quota.
    return qtimes < 2500
def getCoords(locInput):
    """Geocode a free-text location via the Google Maps API (Python 2).

    Returns a 5-tuple:
        (resolved name, {long_name: type}, top-level type,
         (lat, lng), status)
    where status is 'OK', 'FAIL' or 'OVER_QUERY_LIMIT'.
    Retries up to 10 times with a random backoff while the daily quota
    (see withinQuota) still allows it.
    """
    loc = '+'.join(locInput.split(' '))
    # Template failure result; only the status slot is ever changed.
    emptyResult = [unicode(locInput), {}, '', (0, 0), 'FAIL']
    attempts = 0
    success = False
    while success != True and attempts <= 10:
        attempts += 1
        # '+Africa' biases the geocoder toward the service's target region.
        url = 'https://maps.googleapis.com/maps/api/geocode/json?address=' + loc + '+Africa'
        response = urllib2.urlopen(url).read()
        jsonres = json.loads(response)
        status = jsonres['status']
        if status == 'OK':
            break
        elif status == 'OVER_QUERY_LIMIT' and withinQuota():
            # Per-second rate limit hit but daily quota remains: back off.
            time.sleep(random.random())
            continue
        elif status == 'OVER_QUERY_LIMIT' and not withinQuota():
            emptyResult[-1] = 'OVER_QUERY_LIMIT'
            return emptyResult
        else:
            return emptyResult
    try:
        coords = jsonres['results'][0]['geometry']['location']
        addrcomp = jsonres['results'][0]['address_components']
        levels = [addrcomp[n]['types'][0] for n in range(len(addrcomp))]
        locs = [addrcomp[n]['long_name'] for n in range(len(addrcomp))]
        locDict = dict(zip(locs, levels))
        searchRes = addrcomp[0]['long_name']
    except KeyError:
        return emptyResult
    return searchRes, locDict, levels[0], (coords['lat'], coords['lng']), 'OK'
def haversine((lat1, lng1), (lat2, lng2)):
    """Great-circle distance (km) and initial bearing (deg) between two
    (lat, lng) points, via the haversine formula. Python 2 only: uses
    tuple-unpacking parameters removed in Python 3 (PEP 3113).
    """
    lng1, lat1, lng2, lat2 = map(m.radians, [lng1, lat1, lng2, lat2])
    dlng = lng2 - lng1
    dlat = lat2 - lat1
    a = m.sin(dlat/2)**2+m.cos(lat1)*m.cos(lat2)*m.sin(dlng/2)**2
    distance = 6367*2*m.asin(m.sqrt(a)) #Radius=6367km
    # Forward azimuth from point 1 to point 2.
    b = m.atan2(
        m.sin(dlng)*m.cos(lat2),
        m.cos(lat1)*m.sin(lat2)-m.sin(lat1)*m.cos(lat2)*m.cos(dlng)
    )
    bd = m.degrees(b)
    # Normalise atan2's (-180, 180] result into [0, 360).
    turns, bearing = divmod(bd + 360, 360)
    return distance, bearing
def NESW(br):
    """Map a bearing in degrees (0-360) to an 8-point compass direction.

    Cardinal points get a 10-degree band (e.g. 85-95 -> 'E');
    inter-cardinal points cover the gaps between them.
    """
    # Guard clauses walk the circle clockwise from north.
    if br >= 355 or br <= 5:
        return 'N'
    if br < 85:
        return 'NE'
    if br <= 95:
        return 'E'
    if br < 175:
        return 'SE'
    if br <= 185:
        return 'S'
    if br < 265:
        return 'SW'
    if br <= 275:
        return 'W'
    return 'NW'
def checkCoords(locInput):
    """Match a user-supplied location against known outbreak locations.

    Returns a 6-tuple:
        (match kind: 'exact'|'closest'|'none'|'over',
         geocoded name, matched outbreak name, admin level,
         distance in km, compass direction)
    """
    searchRes, locDict, inputLevel, inputCoords, status = getCoords(locInput)
    if status == 'OVER_QUERY_LIMIT':
        return 'over', searchRes, '', '', 0, ''
    if status == 'FAIL' or not locDict:
        return 'none', searchRes, '', '', 0, ''
    obKeys, outbreaks = dbLocs()
    # Any address component shared with a known outbreak location?
    locInt = list(set(obKeys).intersection(locDict.keys()))
    if locInt and status == 'OK':
        matchLoc = locInt[0]
        matchLevel = locDict[locInt[0]]
        return 'exact', searchRes, matchLoc, matchLevel, 0, ''
    elif not locInt and status == 'OK':
        # No exact match: report the nearest outbreak by great-circle distance.
        dcollect = []
        for ob in obKeys:
            matchCoords = outbreaks[ob]['coords']
            distance, bearing = haversine(inputCoords, matchCoords)
            dcollect.append(distance)
            outbreaks[ob]['dist'] = distance
            outbreaks[ob]['dir'] = NESW(bearing)
        ind = dcollect.index(min(dcollect))
        matchLoc = unicode(obKeys[ind])
        matchLevel = unicode(outbreaks[matchLoc]['type'])
        matchDist = outbreaks[matchLoc]['dist']
        matchDir = outbreaks[matchLoc]['dir']
        return 'closest', searchRes, matchLoc, matchLevel, matchDist, matchDir
    else:
        return 'none', searchRes, '', '', 0, '' # redundant, just in case
if __name__ == '__main__':
    # Manual smoke test (Python 2 print statements).
    print checkCoords('Lokolia')
    print checkCoords('Mamou')
    print checkCoords('Complete Nonsense')
|
'use strict';
var resolve = require('helper-resolve');
/**
* Return a property from the package.json of the specified module.
* _(The module must be installed in `node_modules`)_.
*
* ```js
* {%%= resolve("micromatch") %}
* //=> 'node_modules/micromatch/index.js'
*
* {%%= resolve("micromatch", "version") %}
* //=> '2.2.0'
* ```
*
* @param {String} `fp` The path of the file to include.
* @param {String} `options`
* @option {String} `name` Replace `./` in `require('./')` with the given name.
* @return {String}
*/
module.exports = function resolve_(name, key) {
return resolve.sync(name)[typeof key === 'string' ? key : 'main'];
};
|
/*
* Copyright 2015-2016 DevCon5 GmbH, [email protected]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.inkstand.scribble.security;
import java.security.Principal;
/**
* A Principal to be used in test that carries a username information.
*
* @author <a href="mailto:[email protected]">Gerald Mücke</a>
*
*/
public class SimpleUserPrincipal implements Principal {

    /** The immutable username carried by this principal. */
    private final String name;

    public SimpleUserPrincipal(final String name) {
        this.name = name;
    }

    @Override
    public String getName() {
        return name;
    }
}
|
"""
TWLight URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/ref/urls/
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.admindocs import urls as admindocs
from django.contrib.auth import views as auth_views
from django.contrib.auth.decorators import login_required
from django.views.generic import TemplateView
from django.views.decorators.cache import cache_page
from TWLight.api.urls import urlpatterns as api_urls
from TWLight.applications.urls import urlpatterns as applications_urls
from TWLight.emails.views import ContactUsView
from TWLight.graphs.urls import csv_urlpatterns as csv_urls
from TWLight.graphs.views import DashboardView
from TWLight.resources.urls import urlpatterns as partners_urls
from TWLight.resources.views import (
PartnerSuggestionView,
SuggestionDeleteView,
SuggestionUpvoteView,
)
from TWLight.users import oauth as auth
from TWLight.users.urls import urlpatterns as users_urls
from TWLight.users.views import TermsView
from TWLight.ezproxy.urls import urlpatterns as ezproxy_urls
from .views import HomePageView, NewHomePageView
# Project-wide 400 handler.
handler400 = "TWLight.views.bad_request"

# NOTE(review): for the class-based auth views below, URLconf extra kwargs
# (the dicts passed as the third argument to url()) land in the view's
# self.kwargs, not on the view class — confirm next_page /
# post_change_redirect / post_reset_redirect actually take effect, or move
# them into as_view(...) / success_url.
urlpatterns = [
    # Built-in -----------------------------------------------------------------
    url(r"^admin/doc", include(admindocs)),
    url(r"^admin/", admin.site.urls),
    url(r"^accounts/login/", auth_views.LoginView.as_view(), name="auth_login"),
    url(
        r"^accounts/logout/",
        auth_views.LogoutView.as_view(),
        {"next_page": "/"},
        name="auth_logout",
    ),
    url(
        r"^password/change/$",
        auth_views.PasswordChangeView.as_view(),
        {"post_change_redirect": "users:home"},
        name="password_change",
    ),
    url(
        r"^password/reset/$",
        auth_views.PasswordResetView.as_view(),
        {"post_reset_redirect": "users:home"},
        name="password_reset",
    ),
    # Third-party --------------------------------------------------------------
    url(r"^comments/", include("django_comments.urls")),
    # TWLight apps -------------------------------------------------------------
    # This makes our custom set language form available.
    url(r"^i18n/", include("TWLight.i18n.urls")),
    url(r"^api/", include((api_urls, "api"), namespace="api")),
    url(r"^users/", include((users_urls, "users"), namespace="users")),
    url(
        r"^applications/",
        include((applications_urls, "applications"), namespace="applications"),
    ),
    url(r"^partners/", include((partners_urls, "resources"), namespace="partners")),
    url(r"^csv/", include((csv_urls, "graphs"), namespace="csv")),
    url(r"^ezproxy/", include((ezproxy_urls, "ezproxy"), namespace="ezproxy")),
    # Other TWLight views
    url(r"^oauth/login/$", auth.OAuthInitializeView.as_view(), name="oauth_login"),
    url(r"^oauth/callback/$", auth.OAuthCallbackView.as_view(), name="oauth_callback"),
    url(r"^dashboard/$", DashboardView.as_view(), name="dashboard"),
    url(r"^terms/$", TermsView.as_view(), name="terms"),
    # For partner suggestions
    url(r"^suggest/$", PartnerSuggestionView.as_view(), name="suggest"),
    url(
        r"^suggest/(?P<pk>[0-9]+)/delete/$",
        login_required(SuggestionDeleteView.as_view()),
        name="suggest-delete",
    ),
    url(
        r"^suggest/(?P<pk>[0-9]+)/upvote/$",
        login_required(SuggestionUpvoteView.as_view()),
        name="upvote",
    ),
    # For contact us form
    url(r"^contact/$", ContactUsView.as_view(), name="contact"),
    url(r"^$", HomePageView.as_view(), name="homepage"),
    url(r"^about/$", TemplateView.as_view(template_name="about.html"), name="about"),
    url(r"^homepage/$", NewHomePageView.as_view(), name="new_homepage"),
]
|
'use strict';
var fs = require('fs');
var crypto = require('crypto');
/**
 * In-memory session store, optionally persisted to a JSON file
 * (see setJSONfile). Keys are random hashes; values are expiry
 * timestamps in milliseconds since the epoch.
 *
 * @constructor
 */
var Session = function Session() {
    this.lifetime = 604800000; // 1 week in milliseconds
    // Next time the expired-session sweep (clean) should run.
    this.cronclean = Date.now() + this.lifetime;
    this.sessions = {};
};
/**
 * Define JSON file where the sessions will be saved.
 *
 * @param {String} fp path for a JSON file
 * @return {Object} Session instance
 */
Session.prototype.setJSONfile = function (fp) {
    if (fp)
    {
        this.fp = fp;
        try
        {
            // Load synchronously. The original used the async fs.open, so
            // this.sessions could be read before the file was loaded, and
            // require(fp) cached the file's contents for the process
            // lifetime. JSON.parse + readFileSync avoids both problems.
            this.sessions = JSON.parse(fs.readFileSync(fp, 'utf8'));
        }
        catch (err)
        {
            // File missing or unreadable: create it and start empty.
            fs.writeFileSync(fp, JSON.stringify({}));
            this.sessions = {};
        }
    }
    return this;
};
/**
 * Define session lifetime.
 *
 * @param {Number} lifetime in milliseconds (falsy values are ignored)
 * @return {Object} Session instance
 */
Session.prototype.setLifetime = function (lifetime) {
    if (lifetime) {
        this.lifetime = lifetime;
    }
    return this;
};
/**
 * Kill a session and persist the change.
 *
 * @param {String} hash private-key
 * @return {Object} Session instance
 */
Session.prototype.kill = function (hash) {
    delete this.sessions[hash];
    this.writeSync();
    return this;
};
/**
 * Check if a key hash is a valid session.
 *
 * Runs the expiry sweep first, then calls back with:
 *   - (true, hash, expires) when the session has >1h of life left,
 *   - (true, newHash, expires) when it was about to expire and got rotated,
 *   - (false) when it is unknown or already expired.
 *
 * @param {String} hash private-key
 * @param {function} next callback(res[, hash, expires])
 */
Session.prototype.exists = function(hash, next) {
    this.clean((function() {
        if (typeof this.sessions[hash] === "number")
        {
            var now = Date.now();
            // More than one hour before expiry: keep the session as-is.
            if (this.sessions[hash] - 3600000 > now)
                next(true, hash, this.sessions[hash]);
            // Already expired.
            else if (this.sessions[hash] < now)
                next(false);
            else
            {
                // Less than one hour left: rotate to a fresh key.
                // re-initialize the session
                this.kill(hash);
                this.secure(function(newHash, expires) {
                    return next(true, newHash, expires);
                });
            }
        }
        else
            next(false);
    }).bind(this));
};
/**
 * Create a new secure session keyed by 64 random bytes (base64).
 *
 * @param {function} next callback(privateKey, expires)
 * @throws propagates any crypto entropy failure instead of crashing later
 */
Session.prototype.secure = function(next) {
    crypto.randomBytes(64, (function (err, buf) {
        // The original ignored the error-first argument (misnamed "sex");
        // on failure buf is undefined and buf.toString would throw a
        // confusing TypeError. Fail fast and explicitly instead.
        if (err)
            throw err;
        var hash = buf.toString('base64');
        this.sessions[hash] = Date.now() + this.lifetime;
        this.writeSync();
        next(hash, this.sessions[hash]);
    }).bind(this));
};
/**
 * Clean expired sessions, at most once per lifetime period.
 *
 * Sessions whose expiry is older than the scheduled sweep time are
 * deleted and the store is persisted; otherwise the callback runs
 * immediately with no work done.
 *
 * @param {function} next callback()
 */
Session.prototype.clean = function(next) {
    var now = Date.now();
    if (this.cronclean <= now)
    {
        for (var prop in this.sessions)
        {
            if (this.sessions.hasOwnProperty(prop))
            {
                if (this.sessions[prop] <= this.cronclean)
                    delete this.sessions[prop];
            }
        }
        // Schedule the next sweep one lifetime from now.
        this.cronclean = now + this.lifetime;
        this.writeSync();
        next();
    }
    else
        next();
};
/**
 * Persist the sessions to the JSON file, if one was configured
 * via setJSONfile; otherwise a no-op.
 */
Session.prototype.writeSync = function () {
    if (this.fp) {
        fs.writeFileSync(this.fp, JSON.stringify(this.sessions));
    }
};
/**
 * Get a Session instance (shared process-wide).
 *
 * @singleton
 */
module.exports = new Session();
|
# -*- coding: utf-8 -*-
# @Time    : 2017/8/2 10:06
# @Author  : play4fun
# @File    : test1-opencv-python.py
# @Software: PyCharm

"""
test-opencv-python.py: smoke test for the opencv-python wheel --
load an image, resize, convert to gray, Otsu-threshold, show via matplotlib.
"""

import numpy as np
import cv2
from matplotlib import pyplot as plt

print(cv2.__version__, cv2.__doc__)

img = cv2.imread('../../data/messi5.jpg', cv2.IMREAD_UNCHANGED)  # also load the image's alpha channel
rows, cols, ch = img.shape
print('行/高:', rows, '列/宽:', cols, '通道:', ch)

img = cv2.resize(img, (640, 480))
rows, cols, ch = img.shape
print('行/高:', rows, '列/宽:', cols, '通道:', ch)

gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# Otsu picks the threshold automatically; the 0 passed here is ignored.
ret, thresh = cv2.threshold(gray, 0, 255, cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)
#
# cv2.imshow('thresh', thresh)  # not supported by this headless build
# cv2.waitKey(0)
'''
cv2.error: /Users/travis/build/skvark/opencv-python/opencv/modules/highgui/src/window.cpp:583: error: (-2) The function is not implemented. Rebuild the library with Windows, GTK+ 2.x or Carbon support. If you are on Ubuntu or Debian, install libgtk2.0-dev and pkg-config, then re-run cmake or configure script in function cvShowImage
'''
# Display with matplotlib instead (cv2.imshow is unavailable, see above).
plt.imshow(thresh, cmap='gray')
plt.show()
# Optionally write the results out as jpg files:
# cv2.imwrite('messi5-gray.jpg', gray)
# cv2.imwrite('messi5-thresh.jpg', thresh)
|
/*
* citygml4j - The Open Source Java API for CityGML
* https://github.com/citygml4j
*
* Copyright 2013-2022 Claus Nagel <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.citygml4j.model.gml.geometry.aggregates;
import org.citygml4j.builder.copy.CopyBuilder;
import org.citygml4j.geometry.BoundingBox;
import org.citygml4j.model.common.base.ModelObjects;
import org.citygml4j.model.common.child.ChildList;
import org.citygml4j.model.common.visitor.GMLFunctor;
import org.citygml4j.model.common.visitor.GMLVisitor;
import org.citygml4j.model.common.visitor.GeometryFunctor;
import org.citygml4j.model.common.visitor.GeometryVisitor;
import org.citygml4j.model.gml.GMLClass;
import org.citygml4j.model.gml.geometry.primitives.Polygon;
import org.citygml4j.model.gml.geometry.primitives.PolygonProperty;
import java.util.Arrays;
import java.util.List;
/**
 * GML MultiPolygon aggregate: an (optional) list of polygon members.
 * The member list is a ChildList so each member's parent pointer is
 * kept in sync with this aggregate.
 */
public class MultiPolygon extends AbstractGeometricAggregate {
    private List<PolygonProperty> polygonMember;

    public MultiPolygon() {
    }

    public MultiPolygon(List<Polygon> polygons) {
        for (Polygon polygon : polygons)
            addPolygonMember(new PolygonProperty(polygon));
    }

    public MultiPolygon(Polygon... polygons) {
        this(Arrays.asList(polygons));
    }

    public void addPolygonMember(PolygonProperty polygonMember) {
        getPolygonMember().add(polygonMember);
    }

    /** Lazily creates the member list on first access. */
    public List<PolygonProperty> getPolygonMember() {
        if (polygonMember == null)
            polygonMember = new ChildList<>(this);

        return polygonMember;
    }

    public boolean isSetPolygonMember() {
        return polygonMember != null && !polygonMember.isEmpty();
    }

    public void setPolygonMember(List<PolygonProperty> polygonMember) {
        this.polygonMember = new ChildList<>(this, polygonMember);
    }

    public void unsetPolygonMember() {
        polygonMember = ModelObjects.setNull(polygonMember);
    }

    public boolean unsetPolygonMember(PolygonProperty polygonMember) {
        return isSetPolygonMember() && this.polygonMember.remove(polygonMember);
    }

    /** Union of the bounding boxes of all set polygon members. */
    public BoundingBox calcBoundingBox() {
        BoundingBox bbox = new BoundingBox();

        if (isSetPolygonMember()) {
            for (PolygonProperty polygonProperty : getPolygonMember())
                if (polygonProperty.isSetPolygon())
                    bbox.update(polygonProperty.getPolygon().calcBoundingBox());
        }

        return bbox;
    }

    public GMLClass getGMLClass() {
        return GMLClass.MULTI_POLYGON;
    }

    public Object copy(CopyBuilder copyBuilder) {
        return copyTo(new MultiPolygon(), copyBuilder);
    }

    @Override
    public Object copyTo(Object target, CopyBuilder copyBuilder) {
        MultiPolygon copy = (target == null) ? new MultiPolygon() : (MultiPolygon)target;
        super.copyTo(copy, copyBuilder);

        if (isSetPolygonMember()) {
            for (PolygonProperty part : polygonMember) {
                PolygonProperty copyPart = (PolygonProperty)copyBuilder.copy(part);
                copy.addPolygonMember(copyPart);

                // Shallow copy: the builder returned the same instance, so
                // re-attach the shared part to this (the source) aggregate.
                if (part != null && copyPart == part)
                    part.setParent(this);
            }
        }

        return copy;
    }

    public void accept(GeometryVisitor visitor) {
        visitor.visit(this);
    }

    public <T> T accept(GeometryFunctor<T> visitor) {
        return visitor.apply(this);
    }

    public void accept(GMLVisitor visitor) {
        visitor.visit(this);
    }

    public <T> T accept(GMLFunctor<T> visitor) {
        return visitor.apply(this);
    }
}
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration: add the required Post.author FK to auth.User.

    The `models` dict below is South's frozen snapshot of the ORM at the
    time this migration was generated -- do not edit it by hand.
    """

    def forwards(self, orm):
        """Add the author column (backfilled to user pk 1)."""
        # Adding field 'Post.author'
        db.add_column(u'blog_post', 'author',
                      self.gf('django.db.models.fields.related.ForeignKey')(default=1, related_name='posts', to=orm['auth.User']),
                      keep_default=False)

    def backwards(self, orm):
        """Drop the author column again."""
        # Deleting field 'Post.author'
        db.delete_column(u'blog_post', 'author_id')

    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'blog.post': {
            'Meta': {'object_name': 'Post'},
            'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'posts'", 'to': u"orm['auth.User']"}),
            'content': ('django.db.models.fields.TextField', [], {}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'published': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }

    complete_apps = ['blog']
|
# -*- coding: utf-8 -*-
from camera import PerspectiveCamera
from mesh import fromObjFile
from light import PointLight, TubeLight
from game import Game
from display import Display
import signal
import struct
import sys
from PIL import Image
import datetime
import time
def save_screenshot(raw_data, width=int(Display.width), height=int(Display.height), ext='png'):
    """Save raw RGB bytes as a timestamped image file in the CWD.

    NOTE(review): the width/height defaults are evaluated once at import
    time from Display — confirm the display size cannot change afterwards.
    """
    ts = datetime.datetime.fromtimestamp(time.time()).strftime("%Y-%m-%d_%H-%M-%S")
    image = Image.frombytes('RGB', (width, height), raw_data)
    image.save("screenshot_%s.%s" % (ts, ext), ext)
class MyGame(Game):
    """Demo scene: a rotating lion model lit by point lights plus a
    horizontally sweeping tube-light "laser". Python 2 (print statement).
    """

    def __init__(self):
        Game.__init__(self)
        camera = PerspectiveCamera([0.0, 0.0, 0.0], near=0.001, far=500.0, fov=90)
        self.lion = fromObjFile("res/models/lion.obj", camera, position = [0, -10, 50], scale = 40.0)
        self.add(self.lion)

        colorWhite = [1.0, 1.0, 1.0]
        colorBlue = [0.0, 0.0, 1.0]
        colorGreen = [0.0, 1.0, 0.0]
        intensityWhite = 10.0
        intensityColor = 15.0
        # Two white fill lights from the sides, blue/green accents above.
        self.add(PointLight(camera, [-50, -10, 0], colorWhite, intensityWhite ))
        self.add(PointLight(camera, [50, -10, 0], colorWhite, intensityWhite ))
        self.add(PointLight(camera, [-10,50,50], colorBlue, intensityColor ))
        self.add(PointLight(camera, [10,50,50], colorGreen, intensityColor ))

        self.laser = TubeLight(camera, [-50, 18.0, 25], [1.0, 0, 0], 10.0, 10.0, [1, 0, 0], 100)
        self.add(self.laser)

        # Allow Ctrl-C to shut the game loop down cleanly.
        def handle_sigint(signum, frame):
            self.stop()
        signal.signal(signal.SIGINT, handle_sigint)

    def update(self, overruns):
        # F4: dump a screenshot; bare except deliberately keeps the game
        # running if the grab fails (error is printed).
        if 'KEY_F4' in self.events:
            try:
                save_screenshot(self.screenshot(0,0, int(Display.width), int(Display.height)))
            except:
                print sys.exc_info()
        if 'KEY_ESC' in self.events:
            self.stop()
        # Spin the lion; compensate for skipped frames via `overruns`.
        self.lion.rotation = [0, (self.lion.rotation[1] + 1 + overruns) % 360, 0]
        # Sweep the laser left-to-right, wrapping at x=50.
        laserpos = self.laser.position[0]+2
        if laserpos > 50:
            laserpos = -50
        self.laser.position = [laserpos, self.laser.position[1], self.laser.position[2]]
# Only start the game when executed as a script, not when imported.
if __name__ == '__main__':
    game = MyGame()
    game.start()
|
package org.apache.bcel.generic;
/* ====================================================================
* The Apache Software License, Version 1.1
*
* Copyright (c) 2001 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution,
* if any, must include the following acknowledgment:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowledgment may appear in the software itself,
* if and wherever such third-party acknowledgments normally appear.
*
* 4. The names "Apache" and "Apache Software Foundation" and
* "Apache BCEL" must not be used to endorse or promote products
* derived from this software without prior written permission. For
* written permission, please contact [email protected].
*
* 5. Products derived from this software may not be called "Apache",
* "Apache BCEL", nor may "Apache" appear in their name, without
* prior written permission of the Apache Software Foundation.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*/
/**
* Denote an instruction that may consume a value from the stack.
*
* @version $Id: StackConsumer.java,v 1.1 2005/12/16 14:11:25 andos Exp $
* @author <A HREF="mailto:[email protected]">M. Dahm</A>
*/
public interface StackConsumer {

    /**
     * Compute how many stack words this instruction consumes.
     *
     * @param cpg the constant pool of the surrounding class/method
     * @return number of words consumed from the operand stack
     */
    public int consumeStack(ConstantPoolGen cpg);
}
|
<?php
namespace Hook\Middlewares;
use Slim;
/**
 * Request-level authentication middleware for the Slim pipeline.
 *
 * NOTE(review): the credential check is commented out below, so this
 * middleware currently performs no authentication and forwards every
 * request down the chain unconditionally — confirm this is intentional.
 */
class AuthMiddleware extends Slim\Middleware
{
    /**
     * Slim middleware entry point, invoked once per request.
     */
    public function call()
    {
        $app = $this->app;
        // Only consumed by the disabled credential check below.
        $request_path = $app->request->getResourceUri();

        // Disabled check: reject requests without an app key unless the
        // path targets /apps/ (kept for reference).
        // if (!$app->key && strpos($app->request->getPath(), "/apps/") === false) {
        //     $app->response->setStatus(403);
        //     $app->response->setBody(json_encode(array('error' => "Invalid credentials.")));
        //     return;
        // }

        // Hand off to the next middleware / the route handler.
        return $this->next->call();
    }
}
|
/*
* RCHSentence.cpp
*
* (C) Copyright 2016 Pavel Bobov.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <string.h>
#include <stdlib.h>
#include "Sentences.h"
#include "StrUtils.h"
// Construct a PWM sentence (talker TALKER_RC, tag TAG_PWM) defaulting to
// channel 1 with a zero pulse.
PWMSentence::PWMSentence() :
    Sentence(TALKER_RC, TAG_PWM),
    channel(1), pulse(0) {
}
// No dynamically-owned resources to release.
PWMSentence::~PWMSentence() {
}
// Serialize this sentence into str (capacity buflen) as
// "<head>,<channel>,<pulse>" plus checksum; a negative channel or pulse
// leaves its field empty.  Returns str on success, NULL when the buffer
// is missing or smaller than MAX_SENTENCE_LENGTH.
char* PWMSentence::get(char str[], size_t buflen) const {
    if (str == NULL || buflen < MAX_SENTENCE_LENGTH) {
        return NULL;
    }

    addHead(str);

    // Channel field (left empty when negative).
    strcat(str, ",");
    char* tail = strend(str);
    if (channel >= 0)
        ltoa2(channel, tail, 10);

    // Pulse field (left empty when negative).
    strcat(str, ",");
    tail = strend(str);
    if (pulse >= 0)
        ltoa2(pulse, tail, 10);

    return addChecksum(str);
}
// Parse a PWM sentence into channel and pulse; an empty field sets the
// corresponding member to -1.  Returns true on success, false when the
// sentence is invalid or does not match this sentence's tag.
bool PWMSentence::set(const char str[]) {
    if (!valid(str))
        return false;

    if (!matches(str))
        return false;

    const char *p = str;

    // channel field (the old comment said "get time", which was wrong)
    p = nextToken(p);
    if (',' != *p)
        channel = atoi(p);
    else
        channel = -1;

    // pulse field — an integral value (get() serializes it with ltoa2),
    // so parse it with atol() instead of the previous atof().
    p = nextToken(p);
    if (',' != *p)
        pulse = atol(p);
    else
        pulse = -1;

    return true;
}
|
/* Build-time configuration flag: 1 enables the 8x16 bitmap font.
 * NOTE(review): the consumer of this flag is outside this file —
 * presumably a console/framebuffer driver; confirm before changing. */
#define CONFIG_FONT_8x16 1
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 Zadara Storage Inc.
# Copyright (c) 2011 OpenStack LLC.
# Copyright 2011 University of Southern California
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit Tests for volume types extra specs code
"""
from cinder import context
from cinder import db
from cinder import exception
from cinder import test
class VolumeGlanceMetadataTestCase(test.TestCase):
    """Unit tests for the volume Glance-metadata DB API
    (create / get / delete / copy-to-snapshot)."""

    def setUp(self):
        super(VolumeGlanceMetadataTestCase, self).setUp()
        self.context = context.get_admin_context()

    def tearDown(self):
        super(VolumeGlanceMetadataTestCase, self).tearDown()

    def test_vol_glance_metadata_bad_vol_id(self):
        # Create/get on a nonexistent volume raise VolumeNotFound; delete
        # is expected to be a silent no-op.
        ctxt = context.get_admin_context()
        self.assertRaises(exception.VolumeNotFound,
                          db.volume_glance_metadata_create,
                          ctxt, 1, 'key1', 'value1')
        self.assertRaises(exception.VolumeNotFound,
                          db.volume_glance_metadata_get, ctxt, 1)
        db.volume_glance_metadata_delete_by_volume(ctxt, 10)

    def test_vol_update_glance_metadata(self):
        ctxt = context.get_admin_context()
        db.volume_create(ctxt, {'id': 1})
        db.volume_create(ctxt, {'id': 2})
        vol_metadata = db.volume_glance_metadata_create(ctxt, 1, 'key1',
                                                        'value1')
        vol_metadata = db.volume_glance_metadata_create(ctxt, 2, 'key1',
                                                        'value1')
        vol_metadata = db.volume_glance_metadata_create(ctxt, 2,
                                                        'key2',
                                                        'value2')

        expected_metadata_1 = {'volume_id': '1',
                               'key': 'key1',
                               'value': 'value1'}
        metadata = db.volume_glance_metadata_get(ctxt, 1)
        self.assertEqual(len(metadata), 1)
        for key, value in expected_metadata_1.items():
            self.assertEqual(metadata[0][key], value)

        expected_metadata_2 = ({'volume_id': '2',
                                'key': 'key1',
                                'value': 'value1'},
                               {'volume_id': '2',
                                'key': 'key2',
                                'value': 'value2'})
        metadata = db.volume_glance_metadata_get(ctxt, 2)
        self.assertEqual(len(metadata), 2)
        for expected, meta in zip(expected_metadata_2, metadata):
            # items() instead of the Python-2-only iteritems(): consistent
            # with the other loops in this class and Python 3 compatible.
            for key, value in expected.items():
                self.assertEqual(meta[key], value)

        # A duplicate key for the same volume is rejected and must not
        # alter the existing entry.
        self.assertRaises(exception.GlanceMetadataExists,
                          db.volume_glance_metadata_create,
                          ctxt, 1, 'key1', 'value1a')
        metadata = db.volume_glance_metadata_get(ctxt, 1)
        self.assertEqual(len(metadata), 1)
        for key, value in expected_metadata_1.items():
            self.assertEqual(metadata[0][key], value)

    def test_vol_delete_glance_metadata(self):
        ctxt = context.get_admin_context()
        db.volume_create(ctxt, {'id': 1})
        db.volume_glance_metadata_delete_by_volume(ctxt, 1)
        vol_metadata = db.volume_glance_metadata_create(ctxt, 1, 'key1',
                                                        'value1')
        db.volume_glance_metadata_delete_by_volume(ctxt, 1)
        metadata = db.volume_glance_metadata_get(ctxt, 1)
        self.assertEqual(len(metadata), 0)
        # Deleting already-deleted metadata must be idempotent.
        db.volume_glance_metadata_delete_by_volume(ctxt, 1)
        metadata = db.volume_glance_metadata_get(ctxt, 1)
        self.assertEqual(len(metadata), 0)

    def test_vol_glance_metadata_copy_to_snapshot(self):
        ctxt = context.get_admin_context()
        db.volume_create(ctxt, {'id': 1})
        db.snapshot_create(ctxt, {'id': 100, 'volume_id': 1})
        vol_meta = db.volume_glance_metadata_create(ctxt, 1, 'key1',
                                                    'value1')
        db.volume_glance_metadata_copy_to_snapshot(ctxt, 100, 1)

        expected_meta = {'snapshot_id': '100',
                         'key': 'key1',
                         'value': 'value1'}
        for meta in db.volume_snapshot_glance_metadata_get(ctxt, 100):
            for (key, value) in expected_meta.items():
                # assertEqual instead of the deprecated assertEquals alias.
                self.assertEqual(meta[key], value)
|
/*
===============================================================================
FILE: laszip_common_v2.hpp
CONTENTS:
Common defines and functionalities for version 2 of LASitemReadCompressed
and LASitemwriteCompressed.
PROGRAMMERS:
[email protected] - http://rapidlasso.com
COPYRIGHT:
(c) 2007-2012, martin isenburg, rapidlasso - tools to catch reality
This is free software; you can redistribute and/or modify it under the
terms of the GNU Lesser General Licence as published by the Free Software
Foundation. See the COPYING file for more information.
This software is distributed WITHOUT ANY WARRANTY and without even the
implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
CHANGE HISTORY:
16 March 2011 -- created after designing the "streaming median" algorithm
===============================================================================
*/
#ifndef LASZIP_COMMON_V2_HPP
#define LASZIP_COMMON_V2_HPP
// Approximate streaming median over the five most recently added values.
// The window is kept sorted in values[]; get() returns the middle element.
// Each add() evicts exactly one extreme: while 'high' is true the current
// maximum is dropped, while false the current minimum is dropped.  The flag
// flips whenever the new value lands in the upper (resp. lower) half, which
// keeps the evictions balanced as new values stream in.
class StreamingMedian5
{
public:
  I32 values[5];  // sliding 5-value window, maintained in ascending order
  BOOL high;      // which extreme the next add() evicts (true -> maximum)

  // Reset the window to all zeros, starting in "evict maximum" mode.
  void init()
  {
    values[0] = values[1] = values[2] = values[3] = values[4] = 0;
    high = true;
  }

  inline void add(I32 v)
  {
    if (high)
    {
      // This add evicts the current maximum values[4].
      if (v < values[2])
      {
        // v belongs in the lower half: shift the upper half up by one,
        // then slot v into its sorted position among values[0..2].
        values[4] = values[3];
        values[3] = values[2];
        if (v < values[0])
        {
          values[2] = values[1];
          values[1] = values[0];
          values[0] = v;
        }
        else if (v < values[1])
        {
          values[2] = values[1];
          values[1] = v;
        }
        else
        {
          values[2] = v;
        }
      }
      else
      {
        // v belongs in the upper half: slot it into values[3..4].
        if (v < values[3])
        {
          values[4] = values[3];
          values[3] = v;
        }
        else
        {
          values[4] = v;
        }
        high = false;  // next add() evicts the minimum instead
      }
    }
    else
    {
      // This add evicts the current minimum values[0] (mirror of above).
      if (values[2] < v)
      {
        // v belongs in the upper half: shift the lower half down by one,
        // then slot v into its sorted position among values[2..4].
        values[0] = values[1];
        values[1] = values[2];
        if (values[4] < v)
        {
          values[2] = values[3];
          values[3] = values[4];
          values[4] = v;
        }
        else if (values[3] < v)
        {
          values[2] = values[3];
          values[3] = v;
        }
        else
        {
          values[2] = v;
        }
      }
      else
      {
        // v belongs in the lower half: slot it into values[0..1].
        if (values[1] < v)
        {
          values[0] = values[1];
          values[1] = v;
        }
        else
        {
          values[0] = v;
        }
        high = true;  // next add() evicts the maximum again
      }
    }
  }

  // Median of the current window (middle of the five sorted values).
  I32 get() const
  {
    return values[2];
  }

  // Start with a zeroed window.
  StreamingMedian5()
  {
    init();
  }
};
// for LAS files with the return (r) and the number (n) of
// returns field correctly populated the mapping should really
// be only the following.
// { 15, 15, 15, 15, 15, 15, 15, 15 },
// { 15, 0, 15, 15, 15, 15, 15, 15 },
// { 15, 1, 2, 15, 15, 15, 15, 15 },
// { 15, 3, 4, 5, 15, 15, 15, 15 },
// { 15, 6, 7, 8, 9, 15, 15, 15 },
// { 15, 10, 11, 12, 13, 14, 15, 15 },
// { 15, 15, 15, 15, 15, 15, 15, 15 },
// { 15, 15, 15, 15, 15, 15, 15, 15 }
// however, some files start the numbering of r and n with 0,
// only have return counts r, or only have number of return
// counts n, or mix up the position of r and n. we therefore
// "complete" the table to also map those "undesired" r & n
// combinations to different contexts
// Maps the (return number, number of returns) byte pair — both presumably
// pre-clamped to 0..7 by the caller (TODO confirm) — to one of 16
// compression contexts; see the derivation notes in the comments above.
const U8 number_return_map[8][8] =
{
  { 15, 14, 13, 12, 11, 10,  9,  8 },
  { 14,  0,  1,  3,  6, 10, 10,  9 },
  { 13,  1,  2,  4,  7, 11, 11, 10 },
  { 12,  3,  4,  5,  8, 12, 12, 11 },
  { 11,  6,  7,  8,  9, 13, 13, 12 },
  {  9, 10, 11, 12, 13, 14, 15, 14 },
  {  8,  9, 10, 11, 12, 13, 14, 15 }
};
// for LAS files with the return (r) and the number (n) of
// returns field correctly populated the mapping should really
// be only the following.
// { 0, 7, 7, 7, 7, 7, 7, 7 },
// { 7, 0, 7, 7, 7, 7, 7, 7 },
// { 7, 1, 0, 7, 7, 7, 7, 7 },
// { 7, 2, 1, 0, 7, 7, 7, 7 },
// { 7, 3, 2, 1, 0, 7, 7, 7 },
// { 7, 4, 3, 2, 1, 0, 7, 7 },
// { 7, 5, 4, 3, 2, 1, 0, 7 },
// { 7, 6, 5, 4, 3, 2, 1, 0 }
// however, some files start the numbering of r and n with 0,
// only have return counts r, or only have number of return
// counts n, or mix up the position of r and n. we therefore
// "complete" the table to also map those "undesired" r & n
// combinations to different contexts
// Symmetric "level" table for the (return number, number of returns) pair:
// entry [i][j] is exactly |i - j|; see the derivation notes above.
const U8 number_return_level[8][8] =
{
  { 0, 1, 2, 3, 4, 5, 6, 7 },
  { 1, 0, 1, 2, 3, 4, 5, 6 },
  { 2, 1, 0, 1, 2, 3, 4, 5 },
  { 3, 2, 1, 0, 1, 2, 3, 4 },
  { 4, 3, 2, 1, 0, 1, 2, 3 },
  { 5, 4, 3, 2, 1, 0, 1, 2 },
  { 6, 5, 4, 3, 2, 1, 0, 1 },
  { 7, 6, 5, 4, 3, 2, 1, 0 }
};
#endif
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.