prefix stringlengths 0 918k | middle stringlengths 0 812k | suffix stringlengths 0 962k |
|---|---|---|
from .resource import Resource |
"""
contentful.locale
~~~~~~~~~~~~~~~~~
This module implements the Locale class.
API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/localization
:copyright: (c) 2016 by Contentful GmbH.
:license: MIT, see LICENSE for more details.
"""
clas | s Locale(Resource):
"""
API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/localization
"""
def __init__(self, item, **kwargs):
super(Locale, self).__init__(item, **kwargs)
self.code = item.get('code', '')
self.name = item.get('... |
# Copyright 2001 by Tarjei Mikkelsen. All rights reserved.
# Revisions copyright 2007 by Michiel de Hoon. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Tests the basic funct... | e
from Bio.KEGG import Compound
from Bio.KEGG import Map
from Bio.Pathway import System
# TODO - use unittest instead of print-and-compare testing
test_KEGG_Enzyme_files = ["enzyme.sample", "enzyme.irregular", "enzyme.new"]
test_KEGG_Compound_files = ["compound.sample", "compound.irregular"]
test | _KEGG_Map_files = ["map00950.rea"]
def t_KEGG_Enzyme(testfiles):
"""Tests Bio.KEGG.Enzyme functionality."""
for file in testfiles:
fh = open(os.path.join("KEGG", file))
print("Testing Bio.KEGG.Enzyme on " + file + "\n\n")
records = Enzyme.parse(fh)
for record in records:
... |
#!/usr/bin/env python
"""
Created Wed Oct 7 15:04:36 CEST 2015
@author: sapfo
"""
import matplotlib
#matplotlib.use('Agg')
import simul_ms
import python_cmdscale
#import python_pca
import exp
import sys
import numpy as np
import pylab as py
from scipy.stats import norm
'''
We want to pick n1, n2, D, T?
Simulate ... | ed total tree length, bias, rmse
t_mds = (2./(np.average(evals_mds[:-1])))**(1/2.)
T_mds[nsnp].append(t_mds)
if verbose: print "expected T (mds) from eigenvalues: ",T_mds
# pca expected tree length, bias, rmse
#t_pca = 1./np.average(evals_pca[:-1])
#T_pca[nsnp].append(t... | envalues: ",T_pca
print "expected lambda1 (mds) for (Ivan analytical): ",2./((exp_tree_length)**2)
#print "expected lambda1 (pca) for (Ivan analytical): ",1./((exp_tree_length))
#print "observed lambda1 (mds procedure): ",evals_mds[0]
#print "observed lambda1 (pca procedure): ",evals_pca[0]
#pri... |
TIONS` or `RADIO_SHOWS`.
start -- Which number to start the retrieval from. Used for paging.
max_items -- The total number of results to return.
"""
if favorite_type != RADIO_SHOWS or RADIO_STATIONS:
favorite_type = RADIO_STATIONS
response = self.contentDirectory.Br... | or item in result if item.__class__ == DidlMusicAlbum]
# It is necessary to update the list of items in two places, due to
# a bug in SearchResult
result[:] = reduced
result._metadata.update({
'item_list': reduced,
'search_type': 'albums_for_artist',
'... | ': len(reduced)
})
return result
def get_tracks_for_album(self, artist, album, full_album_art_uri=False):
"""Get tracks for an artist's album.
:param artist: Artist name
:type artist: str
:param album: Album name
:type album: str
:param full_album_ar... |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
depende | ncies = [
]
operations = [
migrations.CreateModel(
name='Registry',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(auto_now_add=True)),
(... | odels.IntegerField()),
('visibility', models.CharField(default=b'Unpusblished', max_length=20, choices=[(b'Published', b'Published'), (b'Unpusblished', b'Unpusblished')])),
('title', models.CharField(max_length=50)),
('alt_text', models.CharField(max_length=200)),
... |
ount.name,
domainid=self.account.domainid,
listall=True
)
self.assertEqual(
routers,
None,
"List routers should return empty response"
)
self.debug(
"Deploying another instance (startv... | self.skipTest("RBD storage type is required for data volumes for LXC")
self.apiclient = self.testClient.getApiClient()
| self.dbclient = self.testClient.getDbConnection()
self.testdata["virtual_machine"]["zoneid"] = self.zone.id
self.testdata["iso"]["zoneid"] = self.zone.id
self.testdata["virtual_machine"]["template"] = self.template.id
self.account = Account.create(
self.apiclient,
... |
from rtruffle | .abstract_node import AbstractNode, NodeInitializeMetaClass
class BaseNode(AbstractNode):
__metaclass__ = NodeInitializeMetaClass
_immutable_fields_ = ['_source_sec | tion', '_parent']
_child_nodes_ = []
|
ks
if (private and not self.isPublic(cb)) or
(not private and self.isPublic(cb))]
names.sort()
if names:
irc.reply(format('%L', names))
else:
if private:
i... | commands.setdefault(command, []).append(cb.name())
for (key, names) in commands.items():
for name in names:
L.append('%s %s' % (name, key))
if L:
L.sort()
irc.reply(format('%L', L))
else:
irc.reply(_('No appropriate command... | "[<plugin>] [<command>]
This command gives a useful description of what <command> does.
<plugin> is only necessary if the command is in more than one plugin.
You may also want to use the 'list' command to list all available
plugins and commands.
"""
if not command:
... |
#!/usr/bin/env python2
import os
from credit import main, exce
from credit import jsonhelper as jh
from credit.tests import testData as td
import unittest
class Test_total_all_net(unittest.TestCase):
def setUp(self):
self.fnum = 10
self.days = 10
self.startname = 'test_display'
s... | f.write(jh.dict_to_json(fakeDict))
def test_total_all(self):
num_files = 0
totals = 0
f | or sheetname, total in main.total_all():
self.assertTrue((sheetname + main.SHEETEXT) in self.files)
num_files += 1
totals += total
self.assertEqual(num_files, self.fnum)
self.assertTrue(abs(totals - self.bal) < td.ERROR)
def test_net(self):
self.assertTru... |
uild_column('birth', DateCol(default = date(1970, 1, 1), is_permanent = True, label = u"Date de naissance"))
build_column('adoption', BoolCol(entity = "ind", label = u"Enfant adopté"))
build_column('alt', BoolCol(label = u'Enfant en garde alternée')) # TODO: cerfa_field
build_column('activite', EnumCol(label = u'... | column = BoolCol(default = False)
entity_class = Familles
label = u"maries"
def function(self, simulation, period):
"""couple = 1 si couple marié sinon 0 TODO: faire un choix avec | couple ?"""
# Note : Cette variable est "instantanée" : quelque soit la période demandée, elle retourne la valeur au premier
# jour, sans changer la période.
statmarit_holder = simulation.compute('statmarit', period)
statmarit = self.filter_role(statmarit_holder, role = CHEF)
r... |
from __future__ import absolute_import
import re
import math
import contextlib
from .LineTransformProcessor import LineTransformProcessor
import makerbot_driver
class AnchorProcessor(LineTransformProcessor):
def __init__(self):
super(AnchorProcessor, self).__init__()
self.is_bundleable = True
... | s = "G1 X%s Y%s Z%s F3300.0 (move to waiting position)"
start_codes = start_codes % | start_position
return start_codes
|
from django_nose.tools import assert_false, assert_true
from pontoon.base.tests import TestCase
from pontoon.base.utils import extensi | on_in
class UtilsTests(TestCase):
def test_extension_in(self):
assert_true(extension_in('filename.txt', ['bat', 'txt']))
assert_true(extension_in('filename.biff', ['biff']))
assert_true(extension_in('filename.tar.gz', ['gz']))
assert_false(extension_in('filename.txt', ['png', 'jpg... | ks.
assert_false(extension_in('filename.tar.gz', ['tar.gz']))
|
from . animation import Animation
from .. layout import strip
class Strip(Animation):
LAYOUT_CLASS = strip.Strip
LAYOUT_ARGS = 'num',
def __init__(self, layout, start=0, end=-1, **kwds):
super().__init__(layout, **kwds)
sel | f._start = max(start, 0)
self._end = end
if self._end < 0 or self._end >= self.layout.numLEDs:
self._end = self.layout.numLEDs - 1
self._size = self._end - self._start + 1
from .. import deprecated
if depr | ecated.allowed():
BaseStripAnim = Strip
|
job_tags = 'foobar',
skip_tags = 'barfoo',
ask_variables_on_launch=on_off,
ask_tags_on_launch=on_off,
ask_skip_tags_on_launch=on_off,
ask_job_type_on_launch=on_off,
ask_inventory_on_launch=on_off,
ask_limit_on_launch=on_off,
... | edentials'] = set(Credential.objects.get(pk=_id) for _id in data['credentials'])
if 'inventory' in data:
internal['inventory'] = Inventory.objects.get(pk=data['inventory'])
return internal
# End of | setup, tests start here
@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_ignore_unprompted_vars(runtime_data, job_template_prompts, post, admin_user, mocker):
job_template = job_template_prompts(False)
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
with mocker.patch.object... |
from py2neo.server import GraphServer
from py2neo import Node,Relationship
HISTO_LENGTH = 5
def insert(sentence, tokensAndType):
"""
Take a sentence and it's associate tokens and type and store all of it in the db as the last sentence of the dialogue
@type sentence: string
@param sentence: The inser... | erver("../../../neo4j")
graph=server.graph
# Retrieve all the sentences of the dialogue
sentences = graph.cypher.execute("MATCH (n:Histo)-[r*0..5]->(st:SentenceHisto) RETURN st")
print sentences
numberOfSentences = len(sentences)
# Create a node to insert as the last sentence of the dialogue
sentence = Node("S... | entence=sentence)
sentenceType = graph.find_one("SentenceType",
property_key="label",
property_value = tokensAndType[1][0])
sentenceForm = graph.find_one("SentenceType",
property_key="label",
property_value = tokensAndType[1][1])
# Link ... |
ollections import Counter
from typing import List, Mapping, Union, Optional
import numpy as np
import pandas as pd
import seaborn as sns
from matplotlib import pyplot as plt
from bartpy.runner import run_models
from bartpy.sklearnmodel import SklearnModel
ImportanceMap = Mapping[int, float]
ImportanceDistributionMap... | p, percentile: float) -> Mapping[int, float]:
"""
Calculate the required proportion of splits to be selected by variable
Creates a distribution of the _highest_ incl | usion percentage of any variable in each of the permuted models
Threshold is set as a percentile of this distribution
All variables have the same threshold
Note that this is significantly more stringent than the local threshold
Parameters
----------
null_distributions: ImportanceDistributionM... |
import datetime
from django.utils import timezone
from django.test import TestCase
from django.urls import reverse
from .models import Question
class QuestionMethodTests(TestCase):
def test_was_published_recently_with_future_question(self):
"""
was_published_recently() should return | False for questions whose
pub_date is in the future.
"""
time = timezone.now() + datetime.timedelta(days=30)
future_question = Question(pub_date=time)
self.assertIs(future_question.was_published_recently(), False)
def test_was_published_recently_with_old_question(self):
... | old_question = Question(pub_date=time)
self.assertIs(old_question.was_published_recently(), False)
def test_was_published_recently_with_recent_question(self):
"""
was_published_recently() should return True for questions whose
pub_date is within the last day.
"""
... |
#
# @lc app=leetcode id=103 lang=python3
#
# [103] Binary Tree Zigzag Level Order Traversal
#
from typing import List
# Definition for a binary tree node.
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
# @lc code=start
class Solution:
def zigzagL... | if node.right:
new_child.append(node.right)
child = new_child
if current_layer:
if flag:
ret.append(current_layer)
else:
ret.append(current_layer[::-1])
flag = not flag... | Solution()
print(s.zigzagLevelOrder(a))
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
from scrapy.item import Item, Field
class SpiderItem(Item):
# define the fields for your item here like:
# name = scrapy.Field()
brand = Field()
name ... | category = Field()
shopname = Field()
productionName = Field()
productId = Field()
url = Field()
price = Field()
promotionInfo = Field()
monthlySalesVolume = Field()
evaluationNum = Field()
#goodEvaluationNum = Field()
date = Field()
commentCount = Field()
averag | eScore = Field()
goodCount = Field()
goodRate = Field()
generalCount = Field()
generalRate = Field()
poorCount = Field()
poorRate = Field()
showCount = Field()#the comment with picture
commentListPageNum = Field()
imageUrl = Field()
imagePath = Field()
|
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (
scoped_session,
sessionmaker,
)
from zope.sqlalchemy import ZopeTransactionExtension
import tornado.web
fr | om handlers.index import IndexHandler
from handlers.sensors import SensorsHandler
import logging
logging.getLogger().setLev | el(logging.DEBUG)
app = tornado.web.Application([
(r'/', IndexHandler),
(r'/sensors', SensorsHandler)
])
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base()
|
import pytest
from forte.solvers import solver_factory, HF, ActiveSpaceSolver
def test_detci_4():
"""CASCI test of Forte DETCI using the SparseList algorithm to build the sigma vector"""
ref_hf_energy = -99.977636678461636
ref_fci_energy = -100.113732484560970
xyz = """
F
H 1 1.0
"""
... | hf,
type='detci',
states=state,
mo_spaces=input.mo_spaces(frozen_docc=[1, 0, 0, 0]),
options={'active_ref_type': 'cas'}
)
fci.run()
# check results
assert hf.value('hf energy') == pytest.approx(ref_hf_energy, 1.0e-10)
assert fci.value('active space energy')[state] ==... | ame__ == "__main__":
test_detci_4()
|
import logging
from mimeprovider.documenttype import get_default_document_types
from mimeprovider.client import get_default_client
from mimeprovider.exceptions import MimeException
from mimeprovider.exceptions import MimeBadRequest
from mimeprovider.mimerenderer import MimeRenderer
from mimeprovider.validators impor... | validator = None
if hasattr(o, "schema"):
validator = self.validator(o.schema)
m_value = (mimetype, (t, o, validator))
| o_value = (o, (t, mimetype, validator))
yield m_value, o_value
def register(self, *documents):
documents = list(documents)
for document in documents:
self._validate(document)
generator = self._generate_document_mimetypes(documents)
for (m, m_val... |
rom holoviews.core.overlay import NdOverlay
from holoviews.core.spaces import HoloMap
from holoviews.element import Points
from .testplot import TestMPLPlot, mpl_renderer
from ..utils import ParamLogStream
try:
from matplotlib import pyplot
except:
pass
class TestPointPlot(TestMPLPlot):
def test_points... | self.assertEqual(y_range[1], 3.2)
def test_points_padding_nonsquare(self):
points = Points([1, 2, 3]).options(padding=0.1, aspect=2)
plot = mpl_renderer.get_plot(points)
x_range, y_range = plot.handles['axis'].get_xlim(), plot.handles['axis'].get_ylim()
self.assertEqual(x_ran... | al(y_range[0], 0.8)
self.assertEqual(y_range[1], 3.2)
def test_points_padding_logx(self):
points = Points([(1, 1), (2, 2), (3,3)]).options(padding=0.1, logx=True)
plot = mpl_renderer.get_plot(points)
x_range, y_range = plot.handles['axis'].get_xlim(), plot.handles['axis'].get_ylim()... |
iobase.RangeTracker implementations provided with Apache Beam.
"""
import logging
import math
import threading
from six import integer_types
from apache_beam.io import iobase
__all__ = ['OffsetRangeTracker', 'LexicographicKeyRangeTracker',
'OrderedPositionRangeTracker', 'UnsplittableRangeTracker']
clas... | or split_offset >= self.stop_position()):
logging.debug(
'Refusing to split %r at %d: proposed split position out of range',
self, split_offset)
return
| logging.debug('Agreeing to split %r at %d', self, split_offset)
split_fraction = (float(split_offset - self._start_offset) / (
self._stop_offset - self._start_offset))
self._stop_offset = split_offset
return self._stop_offset, split_fraction
def fraction_consumed(self):
with self._l... |
from django.utils.http import url_has_allowed_host_and_scheme
def ge | t_valid_next_url_from_request(request):
next_url = request.POST.get("next") or request.GET.get("next")
if not next_url or not url_has_allowed_host_and_scheme(
url=next_url, a | llowed_hosts={request.get_host()}
):
return ""
return next_url
|
from bisect import bisect_left
class Solution(object):
def kEmptySlots(self, flowers, k):
"""
:type flowers: List[int]
:type k: int
:rtype: int
"""
S = []
for ithday, n in enumerate(flowers): |
idx = bisect_left(S, n)
if idx > 0 and n - S[idx-1] == k+1:
return ithday + 1
elif idx < len(S) and S[idx] - n == k+1:
return ithday + 1
S.insert(idx, n)
return -1
print Solution().kEmptySlots([1,3,2], 1)
print Solution().kEmptySlots([1,2,3], 1)
| |
# grid.py
#
# Copyright 2009 danc <quaninux@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at... | with a spacing of 1'''
radius = (gridsize*spacing)/2.
cgrid = cube(location, gridsize, spacing)
print cgrid.shape, location
e = np.zeros(np.size(cgrid,1))
g = np.copy(e)
for i in range(0,np.size(cgrid,1)):
#e[:,i] = euclid.dist(location[0],cgrid[0][i],location[1],cgrid[1][i],location[ | 2],cgrid[2][i])
e[i] = euclid.dist(location,cgrid[:,i])
#e = e*10
print 'diameter', e.max(), 'mm'
sgrid = cgrid[:,e < radius].reshape([3,np.size(cgrid[:,e < radius])/3])
#cgrid[e > radius].reshape([3,np.size(cgrid[e > radius])/3]) == 0
return sgrid#,cgrid
|
# Maitre D'
# Demonstrates treating a value as a cond | ition
print("Welcome to the Chateau D' Food")
print("It seems we are quite full this evening.\n")
money = int(input("How many dollars do you slip the Maitre D'"))
if money:
print("Ah, I am reminded of a table. Right this way.")
else:
print("Please, sit. It may be a while.")
input("\n\nPress the enter key t | o exit.")
|
ith('\n'):
block += '\n'
lines.append(block)
line_counts.append(block.count('\n') +1)
return line_counts, lines
def cleanup_option(self, option, default, aphanumeric_only=False):
"""Removes leading or trailing quotes or double-quotes from a string op... |
t = "<pre><span class=\"go\">%s " | % p
i = l.find(t)
if i != -1:
l = "%s<pre class=\"copyable\"><span class=\"gp\">%s </span><span class=\"copyable-text\"><span class=\"go\">%s" % (l[:i], p, l[len(t)+i:])
copyable_text = True
... |
# generat | ed from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "simulation"
PROJECT_SPACE_DIR = "/home/stagste... | N = "0.0.0"
|
#
# Copyright (C) 2011 Red Hat, Inc.
#
# Author: Angus Salkeld <asalkeld@redhat.com>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any ... | self.name))
zipfilename = os.path.join(self.conf.dbdir, self.name, 'nova.zip')
try:
zip_data = nova_manager.get_credentials(self.username, self.name)
with open(zipfilename, 'w') as f:
f.write(zip_data)
except (exception.UserNotFound, exception.ProjectNotFo... | e. If this is a new '
'installation, you need\nto call something like this:\n\n'
' nova-manage network create pvt 10.0.0.0/8 10 64\n\n')
return False
except exception.ProcessExecutionError, e:
print e
print ("*** The above error may sh... |
fr | om .fake import UserAgent # noqa
| |
#_*_coding: utf-8 _*_
#__author__ = 'Alihanniba'
import urllib.request
# from urllib.request import urlopen
import urllib.error
import re
import os
import taobaotool
import time
class Spider:
def __init__(self):
self.siteUrl = 'http://mm.taobao.com/json/request_top_list.htm'
self.tool = taobaotool... | item[2], item[3], item[4]])
print(item[0], item[1], item[2], item[3], item[4])
return contents
def getDetailPage(self, infoUR | L):
response = urllib.request.urlopen(infoURL)
return response.read().decode('gbk')
def getBrief(self, page):
pattern = re.compile('<div class="mm-aixiu-content".*?>(.*?)<!--',re.S)
result = re.search(pattern, str(page))
return self.tool.replace(result.group(1))
def ge... |
from itertools import *
for i, s in zip(cou | nt(), repeat('over-and-over', | 5)):
print(i, s)
|
from typing import Dict, List
import numpy as np
import hybrid_model
from evaluation import evaluation_metrics
from evaluation import evaluation_parting
metrics_rmse = {'rmse': evaluation_metrics.Rmse()}
metrics_rmse_prec = {'rmse': evaluation_metrics.Rmse(),
'prec@5': evaluation_metrics.Precis... | Evaluation:
def __init__(self,
metrics: Dict[str, evaluation_metrics.Metric] = metrics_rmse_prec,
parts: Dict[str, evaluation_parting.Parting] = parting_full):
self.metrics = metrics
self.parts = parts
def evaluate_hybrid(self, model: 'hybrid_model.hybrid.Hyb... | ) \
-> 'EvaluationResultHybrid':
result = EvaluationResultHybrid()
result.cf = self.evaluate(model.model_cf, x_test, y_test)
result.md = self.evaluate(model.model_md, x_test, y_test)
return result
def evaluate(self, model: 'hybrid_model.models.AbstractModel', x_test: Li... |
# -*- encoding:utf-8 -*-
# sample_CUBRIDdb.py
import CUBRIDdb
con = CUBRIDdb.connect('CUBRID:localhost:33000:demodb:::', 'public')
cur = con.cursor()
cur.execute('DROP TABLE IF EXISTS test_cubrid')
cur.execute('CREATE TABLE test_cubrid (id NUMERIC AUTO_INCREMENT(2009122350, 1), name VARCHAR(50))')
cur.execute("... | ubrid (name) values (?)", ['Tom',])
cur.execute('select * from test_cubrid')
# fetch result use fetchone()
row = cur.fetchone()
print(row)
print('')
# fetch result use f | etchmany()
rows = cur.fetchmany(2)
for row in rows:
print(row)
print("")
rows = cur.fetchall()
for row in rows:
print(row)
cur.close()
con.close()
|
"""
//=========================================================
// OOMidi
// OpenOctave Midi and Audio Editor
// (C) Copyright 2009 Mathias Gyllengahm (lunar_shuttle@users.sf.net)
//=========================================================
"""
import Pyro.core
import time
oom=Pyro.core.getProxyForURI('PYRONAME://:... | j in range(0,5):
for i in range(0,30):
oom.addMidiTrack("amiditrack" + str(i))
for i in range(0,30):
oom.deleteTrack("amiditrack" + str(i))
for i in range(0, 10):
print i
oom.addMidiTrack("amiditrack")
oom.addWaveTrack("awavetrack")
oom.addOutput("anoutput") |
oom.addInput("aninput")
oom.setMute("aninput", False)
oom.setAudioTrackVolume("aninput",1.0)
oom.deleteTrack("amiditrack")
oom.deleteTrack("awavetrack")
oom.deleteTrack("anoutput")
oom.deleteTrack("aninput")
time.sleep(1)
|
#!/usr/local/bin/python
#$Id: logsc.py,v 1.7 2013/11/15 15:07:06 kenji Exp $
from sqlite3 import dbapi2 as sqlite
import sys
# change integer to string if found
def int2str(p):
if type(p) == int:
return str(p)
else:
return p
if __name__ == '__main__':
con = sqlite.connect("/home/kenji/txt/hamradio/LOGS... | `band`, `mode`,
`rst_sent`, `qsl_sent`, `qsl_via`, `comment`, `my_qso_id` from qso
where `call` regexp \'(?i)\' || ? and `qsl_rcvd` <> \'I\'
order by `qso_date` || `time_on`
""", t)
for row in cur.fetchall():
print "-------- | ---"
print "qso_date: ", row[0]
print "time_on: ", row[1]
print "my_call: ", row[2]
print "call: ", row[3]
print "band: ", row[4]
print "mode: ", row[5]
print "rst_sent: ", row[6]
print "qsl_sent: ", row[7]
print "qsl_via: ", row[8]
pri... |
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or a... | elf._fake_service_get_by_compute_host
api_get_mock.side_effects = self._fake_compute_api_get
ctxt = self._get_admin_context()
app = fakes.wsgi_app(fake_auth_context=ctxt)
req = webob.Request.blank('/v2/fake/servers/%s/action' % self.UUID)
req.method = 'POST'
req.body = j... | })
req.content_type = 'application/json'
res = req.get_response(app)
self.assertEqual(400, res.status_int)
|
import unittest
from abc import ABC, abstractmethod
from contextshell.action import ActionExecutor, Executor
from contextshell.backends.node import NodeTreeRoot
from contextshell.backends.virtual import VirtualTree
from contextshell.command import CommandInterpreter
from contextshell.path import NodePath
from contexts... | ) -> Shell:
raise NotImplementedError()
class TreeRootTestsBase(ShellScriptTestsBase):
@abstractmethod
def create_tree_root(self):
| raise NotImplementedError()
def create_shell(self):
self.tree_root = self.create_tree_root()
self.configure_tree_root(self.tree_root)
interpreter = CommandInterpreter(self.tree_root)
shell = Shell(interpreter)
return shell
def configure_tree_root(self, tree_root):
... |
tate_topic"] = "temperature-state"
config["climate"]["temperature_low_state_template"] = "{{ value_json.temp_low }}"
config["climate"]["temperature_high_state_template"] = "{{ value_json.temp_high }}"
assert await async_setup_component(hass, CLIMATE_DOMAIN, config)
await hass.async_block_till_done()
... | async_fire_mqtt_message(hass, "aux-state", "switchmeon")
state = hass.states.get(ENTITY_C | LIMATE)
assert state.attributes.get("aux_heat") == "on"
# anything other than 'switchmeon' should turn Aux mode off
async_fire_mqtt_message(hass, "aux-state", "somerandomstring")
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get("aux_heat") == "off"
# Current temperature
... |
#! /usr/bin/env python
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import * # noqa: F401
'''Runs a full joinmarket pit (using `nirc` miniircd servers,
with `nirc` options specified as an option to pytest),in
bitcoin regtest mode with 3 maker... | )
if malicious:
yg.set_maliciousness(malicious, mtype="tx")
clientfactory = JMClientProtocolFactory(yg, proto_type="MAKER")
nodaemon | = jm_single().config.getint("DAEMON", "no_daemon")
daemon = True if nodaemon == 1 else False
# As noted above, only the final start_reactor() call will
# actually start it!
rs = True if i == num_ygs - 1 else False
start_reactor(jm_single().config.get("DAEMON", "daemon_host"),
... |
import tests.model_control.test_ozone_ | custom_models_enabled as testmod
testmod.build_model( ['Quantization'] , ['ConstantTrend'] , ['Seasonal_M | onthOfYear'] , ['LSTM'] ); |
# Copyright (c) 2019 UAVCAN Consortium
# This software is distributed under the | terms of the MIT License.
# Author: Pavel Kirienko <pavel@uavcan.org>
from ._input import UDPInputSession as UDPInputSession
from ._input import PromiscuousUDPInputSession as PromiscuousUDPInputSession
from ._input import SelectiveUDPInputSession as SelectiveUDPInputSession
from ._input import UDPInputSessionStatist... | ._input import PromiscuousUDPInputSessionStatistics as PromiscuousUDPInputSessionStatistics
from ._input import SelectiveUDPInputSessionStatistics as SelectiveUDPInputSessionStatistics
from ._output import UDPOutputSession as UDPOutputSession
from ._output import UDPFeedback as UDPFeedback
|
from iktomi.utils import cached_property
from .base import Cli
class LazyCli(Cli):
'''
Wrapper for creating lazy command digests.
Sometimes it is not needed to import all of application parts to start
a particular command. LazyCli allows you to define all imports in a
function called only on the ... | al.install)
# ...
def run(args=sys.argv):
| manage(dict(db=db_command, ), args)
'''
def __init__(self, func):
self.get_digest = func
@cached_property
def digest(self):
return self.get_digest()
def description(self, *args, **kwargs):
return self.digest.description(*args, **kwargs)
def __call__(self, *args... |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.contrib.auth.models import User
from django.test.utils import override_settings
from allauth.account import app_settings as account_settings
from allauth.account.models import EmailAddress
from allauth.socialaccount.models im... | class DisqusTests(OAuth2TestsMixin, TestCase):
provider_id = DisqusProvider.id
def get_mocked_response(self,
name='Raymond Penners',
email="raymond.penners@example.com"):
return MockedResponse(200, """
{"response": {"name": "%s",
... | "profileUrl": "https://plus.google.com/108204268033311374519",
"id": "108204268033311374519" }}
""" % (name, email))
def test_account_connect(self):
email = "user@example.com"
user = User.objects.create(username='user',
is_active=Tr... |
# Copyright 2009, Kovid Goyal <kovid@kovidgoyal.net>
# Copyright 2013 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "GPL v3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/lic... | BOutput
from . import initialize_container
from lxml import etree
def upshift_markup(oeb):
'Upgrade markup to comply with XHTML 1.1 where possible'
for x in oeb.spine:
root = x.data
body = XPath('//h:body')(root)
if body:
body = body[0]
if not hasattr(body, 'xpath'... | nderline')
def convert(oeb, output_path, epub_flatten=False, dont_split_on_page_breaks=False,
flow_size=260, no_default_epub_cover=False, no_svg_cover=False,
preserve_cover_aspect_ratio=False, pretty_print=False):
if epub_flatten:
FlatFilenames()(oeb)
else:
UniqueFilenames()(oeb... |
from PyQt4 import QtCore, QtGui
class Task(QtCore.QThread):
messageAdded = QtCore.p | yqtSignal(QtCore.QString)
def __init__(self, mainWindow, parent = No | ne):
super(Task, self).__init__(parent)
self.mainWindow = mainWindow
self.finished.connect(self.postRun)
self.terminated.connect(self.postTerminated)
def run(self):
"""
The code in this method is run in another thread.
"""
pass
def postRun(self):... |
from django.urls impor | t path
from | . import views
urlpatterns = [path('unsubscribe', views.QueryObserverUnsubscribeView.as_view())]
|
import nacl.encoding
import nacl.public
import nacl.utils
class WhisperKey():
def __init__(self, key=None):
if key is None:
self.generate_keypair()
else:
if isinstance(key, bytes) or isinstance(key, str):
try:
self._private_key = nacl.public.PrivateKey(key, encoder=nacl.encodin... | # Verify that we can convert the public_key to an nacl.public.PublicKey instance
if isinstance(public_key, nacl.public.PublicKey):
pass
elif isinstance(public_key, str) or isinstance(public_key, bytes): # pragma: no cover
public_key = nacl.public.PublicKey(public_key, encoder=nacl.encoding.Base64En... | .")
# Make sure our message is a bytes object, or convert it to one.
if isinstance(message, bytes): # pragma: no cover
pass
elif isinstance(message, str):
message = bytes(message, "utf-8")
else: # pragma: no cover
raise Exception("Message is not bytes or str.")
box = nacl.publi... |
# Copyright 2018 David Vidal <david.vidal@tecnativa.com>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from | odoo import _, api, models
from odoo.exceptions import UserError
class IrSequence(models.Model):
_inherit = 'ir.sequence'
@api.constrains('prefix', 'code')
def check_simplified_invoice_unique_prefix(self):
if self._context.get('copy_pos_config'):
return
for sequence | in self.filtered(
lambda x: x.code == 'pos.config.simplified_invoice'):
if self.search_count([
('code', '=', 'pos.config.simplified_invoice'),
('prefix', '=', sequence.prefix)]) > 1:
raise UserError(_('There is already a simplified invo... |
# ju | st listing list of requires. will create a set up using these
"""
airflow>=1.7.1,
numpy>=1.1,
requests>=2.1,
pymongo==3.4.0,
pytest>=3.0,
simplejson==3.10.0 | ,
tox==2.6
PyYAML==3.12
"""
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or ag... | permissions and
# limitations under the License.
"""Command for listing regions."""
from googlecloudsdk.api_lib.compute import base_classes
class List(base_classes.GlobalLister):
    """List Google Compute Engine regions."""

    @property
    def service(self):
        """API service proxy used to issue the list request."""
        return self.compute.regions

    @property
    def resource_type(self):
        """Resource collection name used for help text and URI parsing."""
        return 'regions'
List.detailed_help = base_classes.GetGlobalListerHelp('regions')
|
'''
Run unit tests.
'''
import inspect
import os
import rez.vendor.argparse as argparse
from pkgutil import iter_modules
cli_dir = os.path.dirname(inspect.getfile(inspect.currentframe()))
src_rez_dir = os.path.dirname(cli_dir)
tests_dir = os.path.join(src_rez_dir, 'tests')
all_module_tests = []
def setup_parser(par... | (name)
name_ = name[len(prefix):]
all_module_tests.append(name_)
tests.append((name_, module))
# create argparse entry for each module's unit test
for name, module in sorted(tests):
parser.add_argument(
"--%s" % name, action=AddTestModuleAction, nargs=0,
... | t sys
from rez.vendor.unittest2.main import main
os.environ["__REZ_SELFTEST_RUNNING"] = "1"
if opts.only_shell:
os.environ["__REZ_SELFTEST_SHELL"] = opts.only_shell
if not opts.module_tests and not opts.tests:
module_tests = all_module_tests
else:
module_tests = opts.modul... |
from Website.site_base import BaseHandler
import tornado.web
import tornado
import SQL.table_simulation as SQLsim
class RawPacketHandler(BaseHandler):
@tornado.web.authenticated
def get(self):
if self.current_user is None:
self.redirect('login.html?next=edit')
return
... | hed data is used not (possible) new
def post(self):
print(str(self.request)) |
print('Message: ' + str( self.get_argument('message', '')))
print('Client: ' + str( self.get_argument('client', '')))
print('header: ' + str( self.get_argument('header', '')))
self.redirect('/raw') |
# -*- coding: utf-8 -*-
import os
from collections import defaultdict
from random import choice
world = defaultdict(int)
possiblepoints = [(x, y) for x in range(-15, 16)
for y in range(-15, 16)
if 10 <= abs(x + y * 1j) <= 1 | 5]
for i in range(100):
world[choice(possiblepoints)] += 1
for x in range(-15, 16):
print(''.join(str(min([9, world[(x, y)]])) if world[(x, y)] else ' '
for y in range(-15, 16)))
for i in range(1000):
world[choice(possiblepoints)] += 1
for x in range(-15, 16):
print(''.join(str(min([... | .system("pause")
|
p name='LayersWindowPopup'>
<placeholder name="BasicLayerActions">
<menuitem action='PasteLayer'/>
<menuitem action='ClearLayer'/>
</placeholder>
<placeholder name='AdvancedLayerActions'>
<menuitem action='TrimLayer'/>
</pla... | c-model"""
self._processing_model_updates = True
self._update_a | ll()
self._processing_model_updates = False
def _layer_propchange_cb(self, rootstack, path, layer, changed):
if self._processing_model_updates:
logger.debug("Property change skipped: already processing "
"an update from the document model")
if layer is |
import | sys
file_name = sys.argv[1]
with open(file_name, "r") as f:
num = int(f.readline())
for i in range(num):
first_row = int(f.readline()) - 1
first_board = list()
for x in range(4):
raw_line = f.readline()
line = [int(x) for x in raw_line.split(" ")]
first_board.append(line)
second_row = int(f.read... | e()) - 1
second_board = list()
for x in range(4):
raw_line = f.readline()
line = [int(x) for x in raw_line.split(" ")]
second_board.append(line)
common_values = [x for x in first_board[first_row] if x in second_board[second_row]];
if not common_values:
case_string = "Volunteer cheated!"
elif len(... |
from django.contrib.auth.models import User
from django.test import TestCase
from django.test.client import Client
from django.utils import timezone
from channel_facebook.channel import FacebookChannel
from channel_facebook.models import FacebookAccount
from core import models
class FacebookBaseTestCase(TestCase):
... | = status_code
self.ok = ok
def json(self):
return self.json_data
def setUp(self):
self.time = timezone.now()
self.user = self.create_user()
self.fac | ebook_account = self.create_facebook_account(self.user)
self.channel = FacebookChannel()
self.channel_name = models.Channel.objects.get(name="Facebook").name
self.channel_id = models.Channel.objects.get(name="Facebook").id
self.client = Client()
self.conditions = {'hashtag': '#me... |
from .parameters import ParameterBasedType
from .log import LoggerFactory
from .packer import packer
from .misc.six import add_metaclass
TYPES_DESCRIPTIONS = {'generic' : 'Generic module', 'functions_export': 'Such modules give functions that are useful by evaluation rules',
'connector': 'Suchs ... | tr(self, 'pack_name'):
self.pack_directory = packer.get_pack_directory(self.pack_level, self.pack_name)
else:
self.pack_directory = ''
    def get_info(self):
        # Snapshot of this module for status reporting: the current
        # configuration plus placeholder state/log fields.
        return {'configuration': self.get_config(), 'state': 'DISABLED', 'log': ''}
    def prepare(self):
        # Default no-op; subclasses override for pre-launch setup.
        return
    def launch(self):
        # Default no-op; subclasses override to start their work.
        return
    def export_http(self):
        # Default no-op; subclasses override to expose HTTP endpoints.
        return
# Call when the daemon go down.
# WARNING: maybe the daemon thread is still alive, beware
# of the paralel data access
de... |
from django.test | import TestCase
class AnimalTestCase(TestCase):
    """Minimal smoke-test case demonstrating the setUp/test hooks."""

    def setUp(self):
        # Fix: use the print() function instead of the Python 2-only
        # "print 2" statement, which is a SyntaxError on Python 3.
        print(2)

    def test_animals_can_speak(self):
        """Animals that can speak are correctly identified"""
        print(3)
|
s.path.exists(jarpaths[0]): jar_path = jarpaths[0]
elif os.path.exists(jarpaths[1]): jar_path = jarpaths[1]
elif os.path.exists(jarpaths[2]): jar_path = jarpaths[2]
elif os.path.exists(jarpaths[3]): jar_path = jarpaths[3]
elif os.path.exists(jarpaths[4]): jar_path = jarpaths[4]
else: ... | "
"http://h2o-release.s3.amazonaws.com/h2o/{2}/{3}/index.html."
"".format(ver_h2o, str(ver_pkg),branch_name_h2o, build_number_h2o))
self._session_id = H2OConnection.get_json(url_suffix="InitID")["session_key"]
H2OConnection._cluster_info()
@s... | sys_prefix1 = sys_prefix2 = sys.prefix
if sys_prefix1.startswith('/Library'): sys_prefix2 = '/System'+sys_prefix1
elif sys_prefix1.startswith('/System'): sys_prefix2 = sys_prefix1.split('/System')[1]
return [os.path.join(sys_prefix1, "h2o_jar", "h2o.jar"),
os.path.join(os.path.sep,"usr","loc... |
#!/usr/bin/env python3
def main():
# import python's standard math module and numpy
import math, numpy, sys
# import Controller and other blocks from modules
from pyctrl.rc import Controller
from pyctrl.block import Interp, Logger, Constant
from pyctrl.block.system import System, Differen... | e a connection to a windows manager')
sys.exit(0)
# plot pwm
plt.subplot(2,1,1)
plt.plot(data['clock'], data['pwm'], 'b')
plt.ylabel('pwm (%)')
plt.ylim((-120,120))
plt.xlim(0,6)
plt.grid()
# plot encoder
plt.subplot(2,1,2)
plt.plot(data['clock'], data['encoder... | ot(data['clock'], data['pwm'],'g', label='pwm')
ax1.set_ylabel('pwm (%)')
ax1.set_ylim((-60,120))
ax1.grid()
plt.legend(loc = 2)
# plot velocity
ax2 = plt.twinx()
ax2.plot(data['clock'], data['speed'],'b', label='speed')
ax2.plot(data['clock'], data['speed_reference'], 'r', label='refe... |
#!/usr/bin/env python3
# pylint: disable=C0103, C0325, C0301
"""
Zipped Agoda Hotel Data File Parser
-----------------------------------
This utility unzips and parses the Agoda hotel data file, in-memory,
and makes the data available
"""
import csv
import zipfile
import io
import sys
class AgodaParser(object):
... | not float == type(row['rates_from']):
try:
rates_from = float(row['rates_from'])
| except ValueError:
#print("ERROR: Unable to convert '{0}' to float for '{1}'".format(row['rates_from'], row['hotel_name']))
#print("DEBUG: '{0}'".format(row))
rates_from = 'Rates Not Available'
else:
... |
import os
import osiris
import globalvars
class OmlRotator(osiris.OMLHtmlWrapper):
def __init__(self, tag):
osiris.OMLHtmlWrapper.__init__(self, tag, "div", False, "", "", "")
def processHtml(self, item, context):
extensionID = globalvars.extension.getID().getString()
context.page.addJavascript("/... | s.UniqueID.generate().getString())
script = "<script type=\"text/javascript\">Rotator.init(' | " + item.getParam("id") + "');</script>";
return osiris.OMLHtmlWrapper.processHtml(self, item, context) + script;
|
# SharePlum
# This library simplfies the code necessary
# to automate interactions with a SharePoint
# server using python
from .office365 import | Office365 # noqa: F401
from .site import Site # noqa: F401
from .version import __version__ # noqa: F401
__all__ = ["site", "office365"]
__title__ = "SharePlum SharePoint | Library"
__author__ = "Jason Rollins"
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from mock import Mock
from gerrit import GerritClient
from test import CrBuildTestCase
SHORT_CHANGE_ID = 'I7c1811882cf59c1dc55018926edb6d35295c53b8'
CHANGE... | 'labels': labels,
'notify': 'ALL',
})
wit | h self.assertRaises(AssertionError):
client.set_review(CHANGE_ID, REVISION, notify='Argh!')
|
"""Tool specific version checking to identify out of date dependencies.
This provides infrastructure to check version strings against installed
tools, enabling re-installation if a version doesn't match. This is a
lightweight way to avoid out of date dependencies.
"""
from __future__ import print_function
from distuti... | rom cloudbio.custom import shared
from cloudbio.fabutils import quiet
def _parse_from_stdoutflag(out, flag, stdout_index=-1):
"""Extract version information from a flag in verbose stdout.
flag -- text information to identify the line we should split for a version
stdout_index -- Position of the version in... | ine.split()
return parts[stdout_index].strip()
print("Did not find version information with flag %s from: \n %s" % (flag, out))
return ""
def _clean_version(x):
if x.startswith("upstream/"):
x = x.replace("upstream/", "")
if x.startswith("("):
x = x[1:].strip()
if x.ends... |
from .st | ats import * | # noqa
|
# Deleting field 'Pupil.birthday'
db.delete_column('gsaudit_pupil', 'birthday')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', []... | _key': 'True'}),
'jsondata': ('jsonfield.JSONField', [], {'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {}),
'note': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'pupil': ('django.db.models.fields.related.For... | o.db.models.fields.related.ForeignKey', [], {'to': "orm['gsaudit.Skill']"}),
'written_exam': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'gsaudit.pupiltainfo': {
'Meta': {'unique_together': "(('pupil', 'teaching_assignment'),)", 'object_name': 'PupilTAIn... |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Author: Qiang Li
# Email: liqiangneu@gmail.compile
# Time: 10:27, 03/30/2017
import sys
import codecs
import argparse
import random
from io import open
argparse.open = open
reload(sys)
sys.setdefaultencoding('utf8')
if sys.version_info < (3, 0):
sys.stderr = codecs.g... | ne_label = ''
else:
words = line.split('\t')
total_words += 1
if words[0] == '':
words[0] = 'NA'
if words[3] == '':
words[3] = 'O'
if "NP" in words[3]:
words[0] = '#'
words[3] = '#'
remove_words += 1
line_word += ' '+words[0]
... | reserve_words = total_words - remove_words
ologfobj.write('reserve word:{0}\n'.format(reserve_words))
reserve_rate = float(reserve_words) / float(total_words)
print reserve_rate
ologfobj.write('reserve rate:{0}\n'.format(reserve_rate))
if __name__ == '__main__':
parser = create_parser()
args = parser.p... |
import math,sys
from math import pi
def ieee754 (a):
rep = 0
#sign bit
if (a<0):
rep = 1<<31
a = math.fabs(a)
if (a >= 1):
#exponent
exp = int(math.log(a,2))
rep = rep|((exp+127)<<23)
#mantissa
temp = a / pow(2,exp) - 1
i = 22
while i>=0:
temp = temp * 2
if temp > 1:
rep = rep | (1... | cessor)'''
x_processor = str(00000000);
y_processor = str(00000000);
z_processor = time*pi/180
z_float1 = float(z_processor)
z_processor = ieee754(z_float1)
z_processor = hex(z_processor)
z_processor = z_processor. | split('x')
z_processor = z_processor[1]
z_processor = str(z_processor)
x = x+"32'h"+x_processor +";"
y = y+"32'h"+y_processor +";"
z = z+"32'h"+z_processor +";"
print x
print y
print z
print Opcode
print InsTagIn
'''if i ==0:
sine = math.sin(z_float1)
sine = ieee754(sine)
sine = hex(sin... |
import sys
def addAbilities(core, actor, player):
    """Grant the Inside Information skill-mod ability to *actor*.

    *core* and *player* are part of the scripting hook signature and are
    unused here.
    """
    actor.addAbility("sm_inside_information")
def removeAbilities(core, ac | tor, player):
actor.removeAbility("sm_inside_information")
return
|
c, dict) and not isinstance(doc, list):
return # mongodb skips such cases
if isinstance(doc, list):
try:
if part == '$':
doc = doc[0]
else:
doc = doc[int(part)]
continu... | uce_ctx = execjs.compile("""
function doReduce(fnc, docList) {
var reducedList = new Array();
reducer = eval('('+fnc+')');
for | (var key in docList) {
var reducedVal = {'_id': key,
'value': reducer(key, docList[key])};
reducedList.push(reducedVal);
}
return reducedList;
}
""")
doc_list = [json.dumps(doc, default=json_u... |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2016 Jérémie DECOCK (http://www.jdhp.org)
# This script is provided under the terms and conditions of the MIT license:
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (th... | tapipe.benchmark import assess
def norm_angle_diff(angle_in_degrees):
    """Map an angle difference (in degrees) onto the range [0, 90].

    Used to normalize the "delta psi" angle: values that differ by a
    multiple of 180 degrees yield the same result.
    """
    shifted = np.mod(angle_in_degrees + 90, 180)
    return np.abs(shifted - 90.)
# OPTIMIZER ################################################... | t_files, max_num_img=None, aggregation_method="mean"):
self.call_number = 0
# Init the wavelet class
self.cleaning_algorithm = Tailcut()
# Make the image list
self.input_files = input_files
self.max_num_img = max_num_img
self.aggregation_method = aggregation_me... |
models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_bookmar... | .messageindex': {
'Meta': {'unique_together': "(('column', 'value', 'object_id'),)", 'object_name': 'MessageIndex'},
'column': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
... | veIntegerField', [], {}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ... |
# docstrings not needed here (the type handler interfaces are fully
# documented in base.py) pylint: disable-msg=C0111
from .base import ReturnValue, Parameter, \
ReverseWrapperBase, ForwardWrapperBase
class PyObjectParam(Parameter):
DIRECTIONS = [Parameter.DIRECTION_IN]
CTYPES = ['PyObject*']
d... | transfer_ownership
    def convert_c_to_python(self, wrapper):
        # Register this parameter with the Python call being built for a
        # reverse (C-calls-Python) wrapper.
        assert isinstance(wrapper, ReverseWrapperBase)
        if self.transfer_ownership:
            # Py_BuildValue 'N' steals the reference: ownership transfers
            # to the argument tuple.
            wrapper.build_params.add_parameter('N', [self.value])
        else:
            # Py_BuildValue 'O' increments the refcount: the caller keeps
            # its own reference.
            wrapper.build_params.add_parameter('O', [self.value])
def convert_p... | (self.ctype_no_const, self.name)
wrapper.parse_params.add_parameter('O', ['&'+name], self.name)
wrapper.call_params.append(name)
if self.transfer_ownership:
wrapper.before_call.write_code("Py_INCREF((PyObject*) %s);" % name)
class PyObjectReturnValue(ReturnValue):
CTYPES = ['P... |
_404_URLS=(re.compile(r'foo'),),
MANAGERS=('PHB@dilbert.com',),
)
class BrokenLinkEmailsMiddlewareTest(TestCase):
def setUp(self):
self.req = HttpRequest()
self.req.META = {
'SERVER_NAME': 'testserver',
'SERVER_PORT': 80,
}
self.req.path = self.req.path_i... | .process_response(self.req, self.resp)
self.assertEqual(len(mail.outbox), 1)
def test_custom_request_checker(self):
class SubclassedMiddleware(BrokenLinkEmailsMiddleware):
ignored_user_agent_patterns = (re.compile(r'Spider.*'),
re.compile(r'Rob... | user-agent in addition to normal checks.'''
if super(SubclassedMiddleware, self).is_ignorable_request(request, uri, domain, referer):
return True
user_agent = request.META['HTTP_USER_AGENT']
return any(pattern.search(user_agent) for pattern in
... |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the usermodule ``Period`` model: a named date range owned by a Professor."""
    dependencies = [
        ('usermodule', '0002_auto_20151108_2019'),
    ]
    operations = [
        migrations.CreateModel(
            name='Period',
            fields=[
                # Standard auto-increment surrogate key.
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=10)),
                ('start_date', models.DateField()),
                ('end_date', models.DateField()),
                ('professor', models.ForeignKey(to='usermodule.Professor')),
            ],
        ),
    ]
|
# -*- coding: utf-8 -*-
import datetime
from django.contrib import admin
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from newsletter.models import Newsletter, Newsle | tterSubscription
| from newsletter.views import send_mass_email
def send_emails(newsletter, emails):
    """Send *newsletter* (title/txt/html) to *emails* via send_mass_email.

    When settings.DEBUG is on, the recipient list is replaced with the
    ADMINS addresses so development runs never mail real subscribers.
    """
    if settings.DEBUG:
        # Idiomatic truthiness test (was the anti-idiom "== True").
        # ADMINS is a sequence of (name, email) pairs; keep the emails.
        emails = [address for _name, address in settings.ADMINS]
    send_mass_email(settings.EMAIL_FROM, None, emails,
                    newsletter.title, newsletter.txt, newsletter.html)
if settings.DEBUG != True:
newsletter... |
from Database.Controllers.Curso import Curso
class Periodo(object):
def __init__(self,dados=None):
if dados is not None:
self.id = dados ['id']
self.id_curso = dados ['id_curso']
self.periodo = dados ['periodo']
self.creditos = dados ['creditos']
def getId(self):
ret | urn self.i | d
def setId_curso(self,id_curso):
self.id_curso = id_curso
def getId_curso(self):
return self.Id_curso
def getCurso(self):
return (Curso().pegarCurso('where id = %s', (self.id_curso,))).getNome()
def setPeriodo(self,periodo):
self.periodo = periodo
def getPeriodo(self):
return self.periodo
... |
fr | om django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'metuly.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/login/$'... | jango.contrib.auth.views.login'),
url(r'^accounts/logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}),
url(r'', include('meddit.urls')),
)
|
"""
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License ... | all permissions to the creator of the node.
make_acl('auth', '', all=True)
)
)
_zk.start()
_zk.add_auth('digest', jones_credential)
_zk.DataWatch('/services', func=ensure_root)
return _zk
def ensure_root(data, stat):
if not data:
get_zk().ens... | assert t in types
best = request.accept_mimetypes \
.best_match(types)
return best == t
@app.template_filter()
def as_json(d, indent=None):
    # Jinja2 template filter: serialize *d* to JSON and wrap it in Markup
    # so templates can embed it without autoescaping.
    return Markup(json.dumps(d, indent=indent))
@app.context_processor
def inject_services():
return dict(services=[child for child in get_zk().get_chil... |
'''
Created on 12.03.2016
@author: michael
'''
import unittest
from unittest.mock import MagicMock
from alexandriabase.daos import CreatorDao
from alexandriabase.domain import Creator
from alexandriabase.services import CreatorSe | rvice
class CreatorServiceTest(unittest.TestCase):
    """Tests for CreatorService.find_all_active_creators."""

    def testFindVisible(self):
        # Stub the DAO so no database access happens.
        creator_dao = MagicMock(spec=CreatorDao)
        creator_dao.find_all_visible.return_value = [Creator(34), Creator(35)]
        result = CreatorService(creator_dao).find_all_active_creators()
        # The service must return the DAO's visible creators unchanged,
        # fetched via exactly one DAO call.
        self.assertEqual(35, result[1].id)
        creator_dao.find_all_visible.assert_called_once_with()
if ... |
om_pydict({
'a': pa.array([1, 2, 3]),
'b': pa.array(['a', 'b', 'c']),
'c': pa.array(['x', 'y', 'z'])
})
return data_table
@pytest.fixture(scope='module')
def basic_encryption_config():
basic_encryption_config = pe.EncryptionConfiguration(
footer_key=FOOTER_KEY_NAME,
... | ecryption properties
wrong_kms_connection_config = pe.KmsConnectionConfig(
custom_kms_conf={
# Wrong keys - mixup in names
FOOTER_KEY_NAME: COL_KEY.decode("UTF-8"),
COL_KEY_NAME: FO | OTER_KEY.decode("UTF-8"),
}
)
decryption_config = pe.DecryptionConfiguration(
cache_lifetime=timedelta(minutes=5.0))
with pytest.raises(ValueError, match=r"Incorrect master key used"):
read_encrypted_parquet(
path, decryption_config, wrong_kms_connection_config,
... |
'''
Created on Apr 25, 2017
@author: kashefy
'''
import numpy as np
import h5py
from nideep.iow.file_system_utils import g | en_paths, filter_is_h5
def id_loc_to_loc(fpath_src, key_dst, key_src='label_id_loc', has_void_bin=True):
    """Collapse per-id localization bins into a single location channel.

    Sums axis 1 of the dataset at *key_src* (dropping the trailing bin
    when *has_void_bin* is True — presumably a "void"/background bin,
    TODO confirm) and writes the result as a column vector to *key_dst*
    in the same HDF5 file.
    """
    with h5py.File(fpath_src, 'r+') as h5file:
        src = h5file[key_src]
        if has_void_bin:
            summed = np.sum(src[..., :-1], axis=1)
        else:
            summed = np.sum(src, axis=1)
        h5file[key_dst] = np.expand_dims(summed, 1)
def walk_id_loc_to_loc(dir_src, key_dst):
def runner(fpath):
if filter_is_h5(fpath):
id_loc_to_loc(fpath, key_dst)
return True # otherwise gen_paths won't append to list
flist = gen_paths(dir_src, func_filter=... |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... | ntribEstimatorPredictor`.
Args:
estimator: an instance of `tf.contrib.learn.Estimator`.
prediction_input_fn: a function that takes no arguments and returns an
instance of `InputFnOps`.
input_alternative_key: Optional. Specify the input alternative used for
prediction.
output... | ed for single-headed models but required for
multi-headed models.
graph: Optional. The Tensorflow `graph` in which prediction should be
done.
config: `ConfigProto` proto used to configure the session.
"""
self._graph = graph or ops.Graph()
with self._graph.as_default():
inp... |
nslate.convert import xliff2po
from translate.misc import wStringIO
from translate.storage.test_base import headerless_len, first_translatable
class TestXLIFF2PO:
xliffskeleton = '''<?xml version="1.0" ?>
<xliff version="1.1" xmlns="urn:oasis:names:tc:xliff:document:1.1">
<file original="filename.po" source-lang... | ace="preserve">
<source>nonsense</source>
<target>matlhapolosa</target>
<context-group name="po-entry" purpose="information">
<context context-type="x-po-autocomment">Note that this is
garbage</context>
</context-group>
<note from="developer">Note that this is
garbage... | t = first_translatable(pofile)
assert unit.getnotes("developer") == "Note that this is\ngarbage"
potext = str(pofile)
assert potext.index("#. Note that this is\n#. garbage\n") >= 0
def test_locations(self):
"""Tests location comments (#:)"""
minixlf = self.xliffskeleton % ''... |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of | the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF | ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Initial no-op Yoga contract migration.
Revision ID: e25ffa003242
Revises: 27e647c0fad4
Create Date: 2022-01-21 00:00:00.000000
"""
# revision identifiers, used by Alembic.
re... |
import unittest
from itertools import product
from obfusc8.circuit import *
from obfusc8.bp import *
#enable testing of 'private' module member functions, somewhat sloppy style but I prefer it to any alternative
from obfusc8.bp import _matrix2cycle
class TestBranchingProgram(unittest.TestCase):
def setUp(self):
... | 4)', _matrix2cycle(_special1()), 'wrong on input %s'%_special1())
self.assertEqual('(243)', _matrix2cycle(_special2()), 'wrong on input %s'%_sp | ecial2())
self.assertEqual('(234)', _matrix2cycle(_special3()), 'wrong on input %s'%_special3())
if __name__ == '__main__':
unittest.main()
|
import mms
import unittest
from mooseutils import fuzzyAbsoluteEqual
class TestOutflow(unittest.TestCase):
def test(self):
| df1 = mms.run_spatial('advection-outflow.i', 7, y_pp=['L2u', 'L2v'])
fig = mms.ConvergencePlot(xlabel='Element Size ($h$)', ylabel='$L_2$ Error')
fig.plot(df1,
label=['L2u', 'L2v'],
marker='o',
| markersize=8,
num_fitted_points=3,
slope_precision=1)
fig.save('outflow.png')
for label,value in fig.label_to_slope.items():
if label == 'L2u':
self.assertTrue(fuzzyAbsoluteEqual(value, 1., .05))
else:
self... |
# coding=utf-8
from string import ascii_uppercase
import flask_featureflags
from app.main import main
from flask import render_template, request
from app.helpers.search_helpers import get_template_data
from app import data_api_client
import re
try:
from urlparse import urlparse, parse_qs
except ImportError:
fr... | , page, 'gcloud')
suppliers = api_result["suppliers"]
li | nks = api_result["links"]
template_data = get_template_data(main, {
'title': 'Digital Marketplace - Suppliers'
})
return render_template('suppliers_list.html',
suppliers=suppliers,
nav=ascii_uppercase,
count=len(suppl... |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agr | eed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, eith | er express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack_dashboard.test.integration_tests import helpers
from openstack_dashboard.test.integration_tests.regions import messages
from sahara_dashboard.test.integration_tests.helper... |
import os
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.site.site_header = os.environ.get('{{co | okiecutter.env_prefix}}_TITLE', '{{cookiecutter.project_name}} Admin')
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^api/auth/',
include('rest_framework.urls', namespace='rest_framework')),
url(r'^api/token-auth/',
'rest_framework.authtoken.views.obtain... | n'),
url(r'^', include('{{cookiecutter.app_name}}.urls')),
)
|
# This file is part of Tryton. The COPYRIGHT file at the top level of
# this repository contains the full copyright notices and license terms.
from trytond.model import fields
from trytond.pool import Pool, PoolMeta
__all__ = ['StockMove']
__metaclass__ = PoolMeta
class StockMove:
__name__ = 'stock.move'
i... | voice_line.quantity, self.uom)
return quantity
@classmethod
def copy(cls, moves, default=None):
if default is None:
default = {}
else:
default = default.copy()
default.setdefault('invoice_lines', None)
return super(StockMove, cls).copy(moves, defa... | t=default)
|
"""Gross moist stability-related quantities."""
from aospy.constants import c_p, grav, L_v
from aospy.utils.vertcoord import to_pascal
from indiff.deriv import EtaCenDeriv, CenDeriv
import numpy as np
from .. import PLEVEL_STR
from . import horiz_divg, vert_divg
from .thermo import dse, mse, fmse
def field_vert_int_... | p)
def gms_like_ratio(weights, tracer, dp):
"""Compute ratio of integrals in the style of gross moist stability."""
# Integrate weights over lower tropospheric layer
dp = to_pascal(dp)
denominator = field_vert_int_max(weights, dp)
# Integrate tracer*weights over whole column and divide.
numera... | on, in horizontal divergence form."""
divg = horiz_divg(u, v, radius)
return L_v*gms_like_ratio(divg, sphum, dp)
def gross_dry_stab(temp, hght, u, v, radius, dp):
"""Gross dry stability, in horizontal divergence form."""
divg = horiz_divg(u, v, radius)
return -gms_like_ratio(divg, dse(temp, hght),... |
from email import message_from_file
from pkg_resources import working_set as WS
import path
from pathlib import Path
from pkg_resources import *
from pkg_resources import DistInfoDistribution, Distribution
import distutils.dist
import pkg_resources
import dpath
import sys, os, re
from distutils.errors import *
from dis... | return self.description
def get_state(self):
class_dict = self.__dict__
if 'function' in class_dict.keys():
del class_dict['function']
return class_dict
def on_search(self, key, search_function=None):
searcher = search_function if search_fu... | ]
else:
seek = searcher(key)
if seek is not None:
self.storage[key] = seek
self.items.add(key)
self.__setattr__(key, seek)
return seek
return None
def __getattr__(self, attr):
return self[attr]
def __repr__(s... |
class Module:
def __init__(self, mainMenu, params=[]):
# metadata info about the module, not modified during runtime
self.info = {
# name for the module that will appear in module menus
'Name': 'Prompt',
# list of one or more authors for the module
... |
# True if the module needs to run in the background
'Background' : False,
# File extension to save the file as
'OutputExtension' : "",
# if the module needs administrative privileges
'NeedsAdmin' : False,
# True if the method doesn... | 'OpsecSafe' : False,
# the module language
'Language' : 'python',
# the minimum language version needed
'MinLanguageVersion' : '2.6',
# list of any references/other comments
'Comments': [
"https://github.com/fuzzynop/FiveO... |
(self, orm):
# Deleting field 'InstanceApplication.network'
db.delete_column('apply_instanceapplication', 'network_id')
models = {
'apply.instanceapplication': {
'Meta': {'object_name': 'InstanceApplication'},
'admin_contact_email': ('django.db.mode... | odels.fields.related.ForeignKey', [], {'to': "orm['apply.Organization']"}),
'status': ('django.db.models.fields.IntegerField', [], {}),
'vcpus': ('django.db.models.fields.IntegerField', [], {})
| },
'apply.organization': {
'Meta': {'object_name': 'Organization'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('... |
# -*- coding: utf-8 -*
"""HydraTK installation commands
.. module:: lib.install.command
:platform: Unix
:synopsis: HydraTK installation commands
.. moduleauthor:: Petr Rašek <bowman@hydratk.org>
"""
from subprocess import call, Popen, PIPE
from os import path, environ
from sys import exit
from hydratk.lib.syst... | print('Failed to install {0}, hydratk installation failed.'.format(module))
print(err)
exit(-1)
def uninstall_pip(module):
"""Method uninstalls python module via pip
Args:
module (str): python module
Returns:
none
"""
print ('Uninstalling ... | (module))
cmd = 'pip uninstall -y {0}'.format(module)
result, _, err = shell_exec(cmd, True)
if (result != 0):
print('Failed to uninstall {0}'.format(module))
print(err)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.