signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def derivative(self, t, n=1):
    """Return the nth derivative of this quadratic segment at parameter t.

    Note: Bezier curves can have points where their derivative vanishes.
    If you are interested in the tangent direction, use the unit_tangent()
    method instead.
    """
    pts = self.bpoints()
    if n == 1:
        # First derivative of a quadratic Bezier: linear blend of the
        # two control-polygon edge vectors.
        return 2 * ((pts[1] - pts[0]) * (1 - t) + (pts[2] - pts[1]) * t)
    if n == 2:
        # Second derivative is constant for a quadratic.
        return 2 * (pts[2] - 2 * pts[1] + pts[0])
    if n > 2:
        # All higher derivatives of a degree-2 polynomial vanish.
        return 0
    raise ValueError("n should be a positive integer.")
def copy ( self ) :
"""Make deep copy of this KeyBundle
: return : The copy""" | kb = KeyBundle ( )
kb . _keys = self . _keys [ : ]
kb . cache_time = self . cache_time
kb . verify_ssl = self . verify_ssl
if self . source :
kb . source = self . source
kb . fileformat = self . fileformat
kb . keytype = self . keytype
kb . keyusage = self . keyusage
kb . remote = self . remote
retu... |
def get_account_entitlement_for_user ( self , user_id , determine_rights = None , create_if_not_exists = None ) :
"""GetAccountEntitlementForUser .
[ Preview API ] Get the entitlements for a user
: param str user _ id : The id of the user
: param bool determine _ rights :
: param bool create _ if _ not _ ex... | route_values = { }
if user_id is not None :
route_values [ 'userId' ] = self . _serialize . url ( 'user_id' , user_id , 'str' )
query_parameters = { }
if determine_rights is not None :
query_parameters [ 'determineRights' ] = self . _serialize . query ( 'determine_rights' , determine_rights , 'bool' )
if create... |
def _wait_for_file(cls, filename, timeout=FAIL_WAIT_SEC, want_content=True):
    """Wait up to `timeout` seconds for `filename` to appear or raise Timeout().

    If `want_content` is True, the file must also have a non-zero size.
    """
    def file_ready():
        if not os.path.exists(filename):
            return False
        return not want_content or os.path.getsize(filename)

    message = 'file {} to appear'.format(filename)
    return cls._deadline_until(file_ready, message, timeout=timeout)
def split_by_length_of_utterances ( self , proportions = { } , separate_issuers = False ) :
"""Split the corpus into subsets where the total duration of subsets are proportional to the given proportions .
The corpus gets splitted into len ( proportions ) parts , so the number of utterances are
distributed accor... | utterance_to_duration = { }
if separate_issuers : # Count total length of utterances per issuer
issuer_utts_total_duration = collections . defaultdict ( float )
issuer_utts = collections . defaultdict ( list )
for utterance in self . corpus . utterances . values ( ) :
issuer_utts_total_duration [ ut... |
def compute_tls13_traffic_secrets ( self ) :
"""Ciphers key and IV are updated accordingly for Application data .
self . handshake _ messages should be ClientHello . . . ServerFinished .""" | hkdf = self . prcs . hkdf
self . tls13_master_secret = hkdf . extract ( self . tls13_handshake_secret , None )
cts0 = hkdf . derive_secret ( self . tls13_master_secret , b"client application traffic secret" , b"" . join ( self . handshake_messages ) )
self . tls13_derived_secrets [ "client_traffic_secrets" ] = [ cts0 ]... |
def _add_fold_decoration ( self , block , region ) :
"""Add fold decorations ( boxes arround a folded block in the editor
widget ) .""" | deco = TextDecoration ( block )
deco . signals . clicked . connect ( self . _on_fold_deco_clicked )
deco . tooltip = region . text ( max_lines = 25 )
deco . draw_order = 1
deco . block = block
deco . select_line ( )
deco . set_outline ( drift_color ( self . _get_scope_highlight_color ( ) , 110 ) )
deco . set_background... |
def uniformVectorRDD(sc, numRows, numCols, numPartitions=None, seed=None):
    """Generates an RDD comprised of vectors containing i.i.d. samples drawn
    from the uniform distribution U(0.0, 1.0).

    :param sc: SparkContext used to create the RDD.
    :param numRows: Number of Vectors in the RDD.
    :param numCols: Number of elements per Vector.
    :param numPartitions: (optional) Number of partitions in the RDD.
    :param seed: (optional) Random seed.
    """
    # Delegate to the JVM-side MLlib implementation.
    return callMLlibFunc(
        "uniformVectorRDD", sc._jsc, numRows, numCols, numPartitions, seed)
def _is_valid_script_engine ( zap , engine ) :
"""Check if given script engine is valid .""" | engine_names = zap . script . list_engines
short_names = [ e . split ( ' : ' ) [ 1 ] for e in engine_names ]
return engine in engine_names or engine in short_names |
def transform_annotation ( self , ann , duration ) :
'''Apply the chord transformation .
Parameters
ann : jams . Annotation
The chord annotation
duration : number > 0
The target duration
Returns
data : dict
data [ ' pitch ' ] : np . ndarray , shape = ( n , 12)
data [ ' root ' ] : np . ndarray , sh... | # Construct a blank annotation with mask = 0
intervals , chords = ann . to_interval_values ( )
# Get the dtype for root / bass
if self . sparse :
dtype = np . int
else :
dtype = np . bool
# If we don ' t have any labeled intervals , fill in a no - chord
if not chords :
intervals = np . asarray ( [ [ 0 , dur... |
def namebase(self):
    """The same as :meth:`name`, but with one file extension stripped off.

    For example,
    ``Path('/home/guido/python.tar.gz').name == 'python.tar.gz'``,
    but
    ``Path('/home/guido/python.tar.gz').namebase == 'python.tar'``.
    """
    stem, _extension = self.module.splitext(self.name)
    return stem
def next_future_job_delta(self) -> Optional[float]:
    """Give the amount of seconds before the next future job is due."""
    upcoming = self._get_next_future_job()
    if not upcoming:
        return None
    remaining = upcoming.at - datetime.now(timezone.utc)
    return remaining.total_seconds()
def AsJsonString(self):
    """A JSON string representation of this User instance.

    Returns:
      A JSON string representation of this User instance
    """
    # Sort keys so the output is deterministic.
    return json.dumps(self.AsDict(dt=False), sort_keys=True)
def shared_like(param, suffix, init=0):
    """Create a Theano shared variable like an existing parameter.

    Parameters
    ----------
    param : Theano variable
        Theano variable to use for shape information.
    suffix : str
        Suffix to append to the parameter's name for the new variable.
    init : float or ndarray, optional
        Added to the zero-initialized value; defaults to 0.
    """
    initial_value = np.zeros_like(param.get_value()) + init
    return theano.shared(initial_value,
                         name='{}_{}'.format(param.name, suffix),
                         broadcastable=param.broadcastable)
def readpar ( par_file , root ) :
"""Read StagYY par file .
The namelist is populated in chronological order with :
- : data : ` PAR _ DEFAULT ` , an internal dictionary defining defaults ;
- : data : ` PAR _ DFLT _ FILE ` , the global configuration par file ;
- ` ` par _ name _ defaultparameters ` ` if it ... | par_nml = deepcopy ( PAR_DEFAULT )
if PAR_DFLT_FILE . is_file ( ) :
_enrich_with_par ( par_nml , PAR_DFLT_FILE )
else :
PAR_DFLT_FILE . parent . mkdir ( exist_ok = True )
f90nml . write ( par_nml , str ( PAR_DFLT_FILE ) )
if not par_file . is_file ( ) :
raise NoParFileError ( par_file )
par_main = f90nm... |
def mapping(self):
    """Return the constructed mappings.

    Invert these to map internal indices to external ids.

    Returns
    -------
    (user id map, user feature map, item id map, item feature map) : tuple of dictionaries
    """
    return (
        self._user_id_mapping,
        self._user_feature_mapping,
        self._item_id_mapping,
        self._item_feature_mapping,
    )
def Add ( self , service , method , request , global_params = None ) :
"""Add a request to the batch .
Args :
service : A class inheriting base _ api . BaseApiService .
method : A string indicated desired method from the service . See
the example in the class docstring .
request : An input message appropr... | # Retrieve the configs for the desired method and service .
method_config = service . GetMethodConfig ( method )
upload_config = service . GetUploadConfig ( method )
# Prepare the HTTP Request .
http_request = service . PrepareHttpRequest ( method_config , request , global_params = global_params , upload_config = uploa... |
def add_rect ( self , width , height , rid = None ) :
"""Add rectangle of widthxheight dimensions .
Arguments :
width ( int , float ) : Rectangle width
height ( int , float ) : Rectangle height
rid : Optional rectangle user id
Returns :
Rectangle : Rectangle with placemente coordinates
None : If the r... | assert ( width > 0 and height > 0 )
# Obtain the best section to place the rectangle .
section , rotated = self . _select_fittest_section ( width , height )
if not section :
return None
if rotated :
width , height = height , width
# Remove section , split and store results
self . _sections . remove ( section )
... |
def query_for_observations ( mjd , observable , runid_list ) :
"""Do a QUERY on the TAP service for all observations that are part of runid ,
where taken after mjd and have calibration ' observable ' .
Schema is at : http : / / www . cadc - ccda . hia - iha . nrc - cnrc . gc . ca / tap / tables
mjd : float
... | data = { "QUERY" : ( "SELECT Observation.target_name as TargetName, " "COORD1(CENTROID(Plane.position_bounds)) AS RA," "COORD2(CENTROID(Plane.position_bounds)) AS DEC, " "Plane.time_bounds_lower AS StartDate, " "Plane.time_exposure AS ExposureTime, " "Observation.instrument_name AS Instrument, " "Plane.energy_bandpassN... |
def backup_list ( self , query , detail ) :
"""Lists base backups and basic information about them""" | import csv
from wal_e . storage . base import BackupInfo
bl = self . _backup_list ( detail )
# If there is no query , return an exhaustive list , otherwise
# find a backup instead .
if query is None :
bl_iter = bl
else :
bl_iter = bl . find_all ( query )
# TODO : support switchable formats for difference needs ... |
def has_segment_tables(xmldoc, name=None):
    """Return True if the document contains a complete set of segment
    tables.  Returns False otherwise.  If name is given and not None
    then the return value is True only if the document's segment
    tables, if present, contain a segment list by that name.
    """
    try:
        # All three tables must be present for a "complete set".
        def_table = lsctables.SegmentDefTable.get_table(xmldoc)
        names = def_table.getColumnByName("name")
        lsctables.SegmentTable.get_table(xmldoc)
        lsctables.SegmentSumTable.get_table(xmldoc)
    except (ValueError, KeyError):
        return False
    if name is None:
        return True
    return name in names
def accepts(**schemas):
    """Create a decorator for validating function parameters.

    Example::

        @accepts(a="number", body={"+field_ids": [int], "is_ok": bool})
        def f(a, body):
            print(a, body["field_ids"], body.get("is_ok"))

    :param schemas: The schemas to validate the call arguments against.
    """
    validate = parse(schemas).validate

    @decorator
    def validating(func, *args, **kwargs):
        # Resolve positional/keyword args to parameter names, then validate.
        call_args = inspect.getcallargs(func, *args, **kwargs)
        validate(call_args, adapt=False)
        return func(*args, **kwargs)

    return validating
def create_ap(self, args):
    """Request an access point.

    Requests an access-point resource with the given configuration.

    Args:
        - args: request parameters (json); see http://kirk-docs.qiniu.com/apidocs/

    Returns:
        A tuple ``(<result>, <ResponseInfo>)``:
        - result: on success, the created access-point info; on failure,
          ``{"error": "<errMsg string>"}``
        - ResponseInfo: response information for the request
    """
    endpoint = '{0}/v3/aps'.format(self.host)
    return self.__post(endpoint, args)
def is_enabled(self, name):
    """Check if given service name is enabled."""
    services = self.services
    if not services or name not in services:
        return False
    return services[name]['config'] == 'enabled'
def set_connection ( connection = defaults . sqlalchemy_connection_string_default ) :
"""Set the connection string for SQLAlchemy
: param str connection : SQLAlchemy connection string""" | cfp = defaults . config_file_path
config = RawConfigParser ( )
if not os . path . exists ( cfp ) :
with open ( cfp , 'w' ) as config_file :
config [ 'database' ] = { 'sqlalchemy_connection_string' : connection }
config . write ( config_file )
log . info ( 'create configuration file %s' , cfp... |
def fetch(self, obj, include_meta=False, chunk_size=None, size=None,
          extra_info=None):
    """Fetches the object from storage.

    If 'include_meta' is False, only the bytes representing the
    stored object are returned.

    Note: if 'chunk_size' is defined, you must fully read the object's
    contents before making another request.

    NOTE(review): ``extra_info`` is accepted but never forwarded —
    presumably kept for backward compatibility; confirm before removing.
    """
    return self.object_manager.fetch(obj,
                                     include_meta=include_meta,
                                     chunk_size=chunk_size,
                                     size=size)
def backing_type_for ( value ) :
"""Returns the DynamoDB backing type for a given python value ' s type
4 - > ' N '
[ ' x ' , 3 ] - > ' L '
{2 , 4 } - > ' SS '""" | if isinstance ( value , str ) :
vtype = "S"
elif isinstance ( value , bytes ) :
vtype = "B"
# NOTE : numbers . Number check must come * * AFTER * * bool check since isinstance ( True , numbers . Number )
elif isinstance ( value , bool ) :
vtype = "BOOL"
elif isinstance ( value , numbers . Number ) :
vty... |
def compare_notebooks ( notebook_expected , notebook_actual , fmt = None , allow_expected_differences = True , raise_on_first_difference = True , compare_outputs = False ) :
"""Compare the two notebooks , and raise with a meaningful message
that explains the differences , if any""" | fmt = long_form_one_format ( fmt )
format_name = fmt . get ( 'format_name' )
# Expected differences
allow_filtered_cell_metadata = allow_expected_differences
allow_missing_code_cell_metadata = allow_expected_differences and format_name == 'sphinx'
allow_missing_markdown_cell_metadata = allow_expected_differences and fo... |
def changed ( self , src , path , dest ) :
"""If ` path ` does not have any parents , it is built . Otherwise , it will
attempt to build every parent of ` path ` ( or their parents ) . Output file
modification times are taken into account to prevent unnecessary
builds .""" | modified = { path : self . parents [ path ] . updated }
while True :
for path in modified :
if self . parents [ path ] :
mtime = modified . pop ( path )
for parent in self . parents [ path ] :
modified [ parent ] = max ( mtime , self . parents [ parent ] . updated )
... |
def ensure_v8_src():
    """Ensure that v8 sources are present and up-to-date."""
    path = local_path('v8')
    if os.path.isdir(path):
        update_v8(path)
    else:
        fetch_v8(path)
    checkout_v8_version(local_path("v8/v8"), V8_VERSION)
    dependencies_sync(path)
def check_gap(xpub, api_key):
    """Call the 'v2/receive/checkgap' endpoint and return the gap
    for a given extended public key.

    :param str xpub: extended public key
    :param str api_key: Blockchain.info API V2 key
    :return: an int
    """
    query = util.urlencode({'key': api_key, 'xpub': xpub})
    response = util.call_api('v2/receive/checkgap?' + query,
                             base_url='https://api.blockchain.info/')
    return json.loads(response)['gap']
def seek_missing_num ( sorted_array , num_elements ) :
"""Python function to determine the missing number in a sorted array .
The function takes a sorted input array and its length as a parameter , and finds the missing element .
Args :
sorted _ array ( list ) : The sorted array of integers .
num _ elements... | start_element = 0
end_element = num_elements - 1
while start_element <= end_element :
middle_element = int ( ( start_element + end_element ) / 2 )
if sorted_array [ middle_element ] != middle_element + 1 and sorted_array [ middle_element - 1 ] == middle_element :
return middle_element + 1
elif sorte... |
def list_algorithms(self, page_size=None):
    """Lists the algorithms visible to this client.

    Algorithms are returned in lexicographical order.

    :rtype: :class:`.Algorithm` iterator
    """
    params = {} if page_size is None else {'limit': page_size}
    return pagination.Iterator(
        client=self._client,
        path='/mdb/{}/algorithms'.format(self._instance),
        params=params,
        response_class=mdb_pb2.ListAlgorithmsResponse,
        items_key='algorithm',
        item_mapper=Algorithm,
    )
def get_module_path(modname):
    """Return module *modname* base path."""
    module_file = sys.modules[modname].__file__
    return osp.abspath(osp.dirname(module_file))
def class_for_type(self, object_type):
    """Given an object_type return the class associated with it."""
    if object_type in self.class_mapping:
        return self.class_mapping[object_type]
    raise ZenpyException("Unknown object_type: " + str(object_type))
def peek(self, default=_marker):
    """Return the item that will be next returned from ``next()``.

    Return ``default`` if there are no items left.  If ``default`` is not
    provided, raise ``StopIteration``.
    """
    # A cached value means the iterator has already been advanced once.
    if hasattr(self, '_peek'):
        return self._peek
    try:
        self._peek = next(self._it)
    except StopIteration:
        if default is _marker:
            raise
        return default
    return self._peek
async def generate_wallet_key ( config : Optional [ str ] ) -> str :
"""Generate wallet master key .
Returned key is compatible with " RAW " key derivation method .
It allows to avoid expensive key derivation for use cases when wallet keys can be stored in a secure enclave .
: param config : ( optional ) key ... | logger = logging . getLogger ( __name__ )
logger . debug ( "generate_wallet_key: >>> config: %r" , config )
if not hasattr ( generate_wallet_key , "cb" ) :
logger . debug ( "generate_wallet_key: Creating callback" )
generate_wallet_key . cb = create_cb ( CFUNCTYPE ( None , c_int32 , c_int32 , c_char_p ) )
c_con... |
def _validate_dependencies ( self , dependencies , field , value ) :
"""{ ' type ' : ( ' dict ' , ' hashable ' , ' list ' ) ,
' check _ with ' : ' dependencies ' }""" | if isinstance ( dependencies , _str_type ) or not isinstance ( dependencies , ( Iterable , Mapping ) ) :
dependencies = ( dependencies , )
if isinstance ( dependencies , Sequence ) :
self . __validate_dependencies_sequence ( dependencies , field )
elif isinstance ( dependencies , Mapping ) :
self . __valida... |
def verify(token, key, algorithms, verify=True):
    """Verifies a JWS string's signature.

    Args:
        token (str): A signed JWS to be verified.
        key (str or dict): A key to attempt to verify the payload with.
            Can be individual JWK or JWK set.
        algorithms (str or list): Valid algorithms that should be used
            to verify the signature.
        verify (bool): Whether to check the signature at all; when False
            the payload is returned without verification.
            NOTE(review): this parameter shadows the function's own name.

    Returns:
        The payload extracted from the token.

    Raises:
        Whatever ``_load`` / ``_verify_signature`` raise on a malformed
        token or a bad signature (defined elsewhere in this module).
    """
    header, payload, signing_input, signature = _load(token)
    if verify:
        _verify_signature(signing_input, header, signature, key, algorithms)
    return payload
def print_gce_info ( zone , project , instance_name , data ) :
"""outputs information about our Rackspace instance""" | try :
instance_info = _get_gce_compute ( ) . instances ( ) . get ( project = project , zone = zone , instance = instance_name ) . execute ( )
log_yellow ( pformat ( instance_info ) )
log_green ( "Instance state: %s" % instance_info [ 'status' ] )
log_green ( "Ip address: %s" % data [ 'ip_address' ] )
ex... |
def node_dependencies(context: Context):
    """Updates node.js dependencies by running ``npm install``.

    The npm log level is derived from ``context.verbosity`` and colour
    output is disabled when ``context.use_colour`` is falsy.
    """
    loglevel_by_verbosity = {0: 'silent', 1: 'warn', 2: 'info'}
    args = ['--loglevel', loglevel_by_verbosity[context.verbosity]]
    if not context.use_colour:
        # BUG FIX: previously '--color false' was appended as a SINGLE
        # argv element, which npm does not parse as option + value;
        # the flag and its value must be separate arguments.
        args.extend(['--color', 'false'])
    args.append('install')
    return context.shell('npm', *args)
def _run_validators ( self , value ) :
"""Perform validation on ` ` value ` ` . Raise a : exc : ` ValidationError ` if
validation does not succeed .""" | if value in self . empty_values :
return
errors = [ ]
for validator in self . validators :
try :
validator ( value )
except exceptions . ValidationError as err :
if isinstance ( err . messages , dict ) :
errors . append ( err . messages )
else :
errors . exten... |
def register_by_twine(self):
    """register via the twine method

    :return:
    """
    build_cmd = ['{}'.format(self.python), 'setup.py', 'bdist_wheel']
    check_call_no_output(build_cmd)
    # at this point there should be only one file in the 'dist' folder
    filename = self.get_package_filename()
    check_call_no_output(['twine', 'register', filename])
def _handle_signals ( self , signum , frame ) :
"""Handler for all signals .
This method must be used to handle all signals for the process . It is
responsible for runnin the appropriate signal handlers registered with
the ' handle ' method unless they are shutdown signals . Shutdown signals
must trigger th... | if signum in self . kill_signals :
return self . shutdown ( signum )
for handler in self . _handlers [ signum ] :
handler ( ) |
def filter(self, filters):
    '''Apply filters to the pileup elements, and return a new Pileup with the
    filtered elements removed.

    Parameters
    ----------
    filters : list of PileupElement -> bool callables
        A PileupElement is retained if all filters return True when
        called on it.
    '''
    kept = [
        element for element in self.elements
        if all(predicate(element) for predicate in filters)
    ]
    return Pileup(self.locus, kept)
def __verify_minion_publish ( self , load ) :
'''Verify that the passed information authorized a minion to execute''' | # Verify that the load is valid
if 'peer' not in self . opts :
return False
if not isinstance ( self . opts [ 'peer' ] , dict ) :
return False
if any ( key not in load for key in ( 'fun' , 'arg' , 'tgt' , 'ret' , 'id' ) ) :
return False
# If the command will make a recursive publish don ' t run
if re . matc... |
def list_functions(mod_name):
    """Lists all functions declared in a module.

    http://stackoverflow.com/a/1107150/3004221

    Args:
        mod_name: the module name

    Returns:
        A list of functions declared in that module.
    """
    module = sys.modules[mod_name]
    return [obj.__name__
            for obj in module.__dict__.values()
            if is_mod_function(module, obj)]
def from_soup ( self , tag_prof_header , tag_prof_nav ) :
"""Returns the scraped user data from a twitter user page .
: param tag _ prof _ header : captures the left hand part of user info
: param tag _ prof _ nav : captures the upper part of user info
: return : Returns a User object with captured data via b... | self . user = tag_prof_header . find ( 'a' , { 'class' : 'ProfileHeaderCard-nameLink u-textInheritColor js-nav' } ) [ 'href' ] . strip ( "/" )
self . full_name = tag_prof_header . find ( 'a' , { 'class' : 'ProfileHeaderCard-nameLink u-textInheritColor js-nav' } ) . text
location = tag_prof_header . find ( 'span' , { 'c... |
def main ( ) :
"""Runs test imaging pipeline using MPI .""" | # Check command line arguments .
if len ( sys . argv ) < 2 :
raise RuntimeError ( 'Usage: mpiexec -n <np> ' 'python mpi_imager_test.py <settings_file> <dir>' )
# Get the MPI communicator and initialise broadcast variables .
comm = MPI . COMM_WORLD
settings = None
inputs = None
grid_weights = None
# Create log .
log... |
def new_type(type_name: str, prefix: str or None = None) -> str:
    """Creates a resource type with optionally a prefix.

    Using the rules of JSON-LD, we use prefixes to disambiguate between
    different types with the same name: one can Accept a device or a
    project.

    :raise TypeError: if ``type_name`` already carries a prefix.
    """
    if Naming.TYPE_PREFIX in type_name:
        raise TypeError(
            'Cannot create new type: type {} is already prefixed.'.format(type_name))
    if prefix is None:
        return type_name
    return prefix + Naming.TYPE_PREFIX + type_name
def groupby2 ( records , kfield , vfield ) :
""": param records : a sequence of records with positional or named fields
: param kfield : the index / name / tuple specifying the field to use as a key
: param vfield : the index / name / tuple specifying the field to use as a value
: returns : an list of pairs o... | if isinstance ( kfield , tuple ) :
kgetter = operator . itemgetter ( * kfield )
else :
kgetter = operator . itemgetter ( kfield )
if isinstance ( vfield , tuple ) :
vgetter = operator . itemgetter ( * vfield )
else :
vgetter = operator . itemgetter ( vfield )
dic = groupby ( records , kgetter , lambda r... |
def network_interface_present ( name , ip_configurations , subnet , virtual_network , resource_group , tags = None , virtual_machine = None , network_security_group = None , dns_settings = None , mac_address = None , primary = None , enable_accelerated_networking = None , enable_ip_forwarding = None , connection_auth =... | ret = { 'name' : name , 'result' : False , 'comment' : '' , 'changes' : { } }
if not isinstance ( connection_auth , dict ) :
ret [ 'comment' ] = 'Connection information must be specified via connection_auth dictionary!'
return ret
iface = __salt__ [ 'azurearm_network.network_interface_get' ] ( name , resource_g... |
def compact_allele_name ( raw_allele ) :
"""Turn HLA - A * 02:01 into A0201 or H - 2 - D - b into H - 2Db or
HLA - DPA1*01:05 - DPB1*100:01 into DPA10105 - DPB110001""" | parsed_alleles = parse_classi_or_classii_allele_name ( raw_allele )
normalized_list = [ ]
if len ( parsed_alleles ) == 2 :
alpha , beta = parsed_alleles
# by convention the alpha allelle is omitted since it ' s assumed
# to be DRA1*01:01
if alpha == _DRA1_0101 :
parsed_alleles = [ beta ]
for par... |
def _header ( self , pam = False ) :
"""Return file header as byte string .""" | if pam or self . magicnum == b'P7' :
header = "\n" . join ( ( "P7" , "HEIGHT %i" % self . height , "WIDTH %i" % self . width , "DEPTH %i" % self . depth , "MAXVAL %i" % self . maxval , "\n" . join ( "TUPLTYPE %s" % unicode ( i ) for i in self . tupltypes ) , "ENDHDR\n" ) )
elif self . maxval == 1 :
header = "P4... |
def generate_binary_ulid ( timestamp = None , monotonic = False ) :
"""Generate the bytes for an ULID .
: param timestamp : An optional timestamp override .
If ` None ` , the current time is used .
: type timestamp : int | float | datetime . datetime | None
: param monotonic : Attempt to ensure ULIDs are mo... | global _last_entropy , _last_timestamp
if timestamp is None :
timestamp = time . time ( )
elif isinstance ( timestamp , datetime . datetime ) :
timestamp = calendar . timegm ( timestamp . utctimetuple ( ) )
ts = int ( timestamp * 1000.0 )
ts_bytes = _to_binary ( ( ts >> shift ) & 0xFF for shift in ( 40 , 32 , 2... |
def _updateFrame(self):
    """Updates the icon from the movie's current frame, if a movie is set."""
    movie = self.movie()
    if not movie:
        return
    self.setIcon(QtGui.QIcon(movie.currentPixmap()))
def addFreetextAnnot(self, rect, text, fontsize=12, fontname=None,
                     color=None, rotate=0):
    """Add a 'FreeText' annotation in rectangle 'rect'."""
    CheckParent(self)
    annot = _fitz.Page_addFreetextAnnot(
        self, rect, text, fontsize, fontname, color, rotate)
    if not annot:
        return
    annot.thisown = True
    # Use a weak proxy so the annotation does not keep the page alive.
    annot.parent = weakref.proxy(self)
    self._annot_refs[id(annot)] = annot
    return annot
def post_message(message, chat_id=None, token=None):
    '''Send a message to a Telegram chat.

    :param message: The message to send to the Telegram chat.
    :param chat_id: (optional) The Telegram chat id.
    :param token: (optional) The Telegram API token.
    :return: Boolean if message was sent successfully.
    '''
    if not chat_id:
        chat_id = _get_chat_id()
    if not token:
        token = _get_token()
    if not message:
        log.error('message is a required option.')
        # BUG FIX: previously this fell through and still attempted to
        # post the empty message; bail out explicitly instead.
        return False
    return _post_message(message=message, chat_id=chat_id, token=token)
def cache_key ( self , template_name , skip = None ) :
"""Generate a cache key for the template name , dirs , and skip .
If skip is provided , only origins that match template _ name are included
in the cache key . This ensures each template is only parsed and cached
once if contained in different extend chai... | dirs_prefix = ''
skip_prefix = ''
tenant_prefix = ''
if skip :
matching = [ origin . name for origin in skip if origin . template_name == template_name ]
if matching :
skip_prefix = self . generate_hash ( matching )
if connection . tenant :
tenant_prefix = str ( connection . tenant . pk )
return '-'... |
def make_vslc_label ( self , gene_label , allele1_label , allele2_label ) :
"""Make a Variant Single Locus Complement ( VSLC ) in monarch - style .
: param gene _ label :
: param allele1 _ label :
: param allele2 _ label :
: return :""" | vslc_label = ''
if gene_label is None and allele1_label is None and allele2_label is None :
LOG . error ( "Not enough info to make vslc label" )
return None
top = self . make_variant_locus_label ( gene_label , allele1_label )
bottom = ''
if allele2_label is not None :
bottom = self . make_variant_locus_labe... |
def background_task_method ( task ) :
"""Decorate an object method as a background task ( called with help of
gearman ) .
You have to create a task which will handle the gearman call . The
method arguments will be encoded as JSON .
: param task : name of the task
: type task : str
: return : decorated f... | # TODO ako vysledok vrat nejaky JOB ID , aby sa dalo checkovat na pozadi
# TODO vytvorit este vseobecny background _ task nielen pre metody
def decorator_fn ( fn ) :
gearman = None
@ inject ( config = Config )
def gearman_connect ( config ) : # type : ( Config ) - > GearmanService
if 'GEARMAN' not i... |
def from_google(cls, google_x, google_y, zoom):
    """Creates a tile from Google format X Y and zoom"""
    max_tile = (2 ** zoom) - 1
    assert 0 <= google_x <= max_tile, 'Google X needs to be a value between 0 and (2^zoom) -1.'
    assert 0 <= google_y <= max_tile, 'Google Y needs to be a value between 0 and (2^zoom) -1.'
    # TMS flips the Y axis relative to Google's tiling scheme.
    return cls(tms_x=google_x, tms_y=max_tile - google_y, zoom=zoom)
def is_local_maximum ( image , labels , footprint ) :
'''Return a boolean array of points that are local maxima
image - intensity image
labels - find maxima only within labels . Zero is reserved for background .
footprint - binary mask indicating the neighborhood to be examined
must be a matrix with odd dim... | assert ( ( np . all ( footprint . shape ) & 1 ) == 1 )
footprint = ( footprint != 0 )
footprint_extent = ( np . array ( footprint . shape ) - 1 ) // 2
if np . all ( footprint_extent == 0 ) :
return labels > 0
result = ( labels > 0 ) . copy ( )
# Create a labels matrix with zeros at the borders that might be
# hit b... |
def list_saml_providers ( region = None , key = None , keyid = None , profile = None ) :
'''List SAML providers .
CLI Example :
. . code - block : : bash
salt myminion boto _ iam . list _ saml _ providers''' | conn = _get_conn ( region = region , key = key , keyid = keyid , profile = profile )
try :
providers = [ ]
info = conn . list_saml_providers ( )
for arn in info [ 'list_saml_providers_response' ] [ 'list_saml_providers_result' ] [ 'saml_provider_list' ] :
providers . append ( arn [ 'arn' ] . rsplit ... |
def _mapped_populations ( mdl1 , mdl2 ) :
"""Method to get the populations for states in mdl 1
from populations inferred in mdl 2 . Resorts to 0
if population is not present .""" | return_vect = np . zeros ( mdl1 . n_states_ )
for i in range ( mdl1 . n_states_ ) :
try : # there has to be a better way to do this
mdl1_unmapped = mdl1 . inverse_transform ( [ i ] ) [ 0 ] [ 0 ]
mdl2_mapped = mdl2 . mapping_ [ mdl1_unmapped ]
return_vect [ i ] = mdl2 . populations_ [ mdl2_ma... |
def plan_validation ( user , plan = None , on_activation = False ) :
"""Validates validator that represents quotas in a given system
: param user :
: param plan :
: return :""" | if plan is None : # if plan is not given , the default is to use current plan of the user
plan = user . userplan . plan
quota_dict = plan . get_quota_dict ( )
validators = getattr ( settings , 'PLANS_VALIDATORS' , { } )
validators = import_name ( validators )
errors = { 'required_to_activate' : [ ] , 'other' : [ ] ... |
def get_index(cls):
    """Gets the index for this model.

    The index for this model is specified in `settings.ES_INDEXES`
    which is a dict of mapping type -> index name.

    By default, this uses `.get_mapping_type_name()` to determine the
    mapping and returns the value in `settings.ES_INDEXES` for that,
    falling back to the 'default' entry.
    """
    indexes = settings.ES_INDEXES
    index = indexes.get(cls.get_mapping_type_name()) or indexes['default']
    if isinstance(index, six.string_types):
        return index
    # FIXME - not sure what to do here, but we only want one
    # index and somehow this isn't one index.
    return index[0]
def _makedirs ( name , mode = 0o777 , exist_ok = False ) :
"""Source : https : / / github . com / python / cpython / blob /
3ce3dea60646d8a5a1c952469a2eb65f937875b3 / Lib / os . py # L196 - L226""" | head , tail = os . path . split ( name )
if not tail :
head , tail = os . path . split ( head )
if head and tail and not os . path . exists ( head ) :
try :
_makedirs ( head , exist_ok = exist_ok )
except OSError as e :
if e . errno != errno . EEXIST :
raise
cdir = os . curdi... |
def generate_boosted_machine(self):
    """generate_boosted_machine() -> strong

    Creates a single strong classifier from this cascade by concatenating
    all strong classifiers.

    **Returns:**

    ``strong`` : :py:class:`bob.learn.boosting.BoostedMachine`
        The strong classifier combining every weak machine of the cascade.
    """
    combined = bob.learn.boosting.BoostedMachine()
    # Walk the cascade stage by stage; the per-stage indices are not
    # needed when flattening, only the weak machines and their weights.
    for stage, _ in zip(self.cascade, self.indices):
        for weak_machine, weight in zip(stage.weak_machines, stage.weights):
            combined.add_weak_machine(weak_machine, weight)
    return combined
def get_op_restrictions_by_content_operation ( self , content_id , operation_key , expand = None , start = None , limit = None , callback = None ) :
"""Returns info about all restrictions of given operation .
: param content _ id ( string ) : The content ID to query on .
: param operation _ key ( string ) : The... | params = { }
if expand :
params [ "expand" ] = expand
if start is not None :
params [ "start" ] = int ( start )
if limit is not None :
params [ "limit" ] = int ( limit )
return self . _service_get_request ( "rest/api/content/{id}/restriction/byOperation/{opkey}" "" . format ( id = content_id , opkey = opera... |
def add_domain_name(list_name, item_name):
    '''Adds a domain name to a domain name list.

    list_name(str): The name of the specific policy domain name list to append to.

    item_name(str): The domain name to append.

    CLI Example:

    .. code-block:: bash

        salt '*' bluecoat_sslv.add_domain_name MyDomainList example.com
    '''
    request = {
        "jsonrpc": "2.0",
        "id": "ID0",
        "method": "add_policy_domain_names",
        "params": [list_name, {"item_name": item_name}],
    }
    reply = __proxy__['bluecoat_sslv.call'](request, True)
    return _validate_change_result(reply)
def remove_frequencies(self, fmin, fmax):
    """Remove frequencies from the dataset.

    Keeps only rows whose ``frequency`` lies strictly inside the open
    interval ``(fmin, fmax)``; everything else is dropped in place.
    The surviving frequencies are printed for inspection.
    """
    keep_expr = 'frequency > {0} and frequency < {1}'.format(fmin, fmax)
    # query(..., inplace=True) filters self.data in place.
    self.data.query(keep_expr, inplace=True)
    grouped = self.data.groupby('frequency')
    print('Remaining frequencies:')
    print(sorted(grouped.groups.keys()))
def create ( self , key , value ) :
"""Atomically create the given key only if the key doesn ' t exist .
This verifies that the create _ revision of a key equales to 0 , then
creates the key with the value .
This operation takes place in a transaction .
: param key : key in etcd to create
: param value : ... | base64_key = _encode ( key )
base64_value = _encode ( value )
txn = { 'compare' : [ { 'key' : base64_key , 'result' : 'EQUAL' , 'target' : 'CREATE' , 'create_revision' : 0 } ] , 'success' : [ { 'request_put' : { 'key' : base64_key , 'value' : base64_value , } } ] , 'failure' : [ ] }
result = self . transaction ( txn )
... |
def removeDataFrameColumns ( self , columns ) :
"""Removes columns from the dataframe .
: param columns : [ ( int , str ) ]
: return : ( bool )
True on success , False on failure .""" | if not self . editable :
return False
if columns :
deleted = 0
errored = False
for ( position , name ) in columns :
position = position - deleted
if position < 0 :
position = 0
self . beginRemoveColumns ( QtCore . QModelIndex ( ) , position , position )
try :
... |
def arguments_from_optionable(parser, component, prefix=""):
    """Add argparse arguments for every visible option of an :class:`Optionable`.

    Hidden options are skipped; each remaining option is forwarded to
    :func:`argument_from_option`.

    :param parser: the argparse parser (or group) arguments are added to
    :param component: the :class:`Optionable` whose options are exposed
    :param prefix: string prepended to each generated argument name
    """
    for name, opt in component.options.items():
        # Hidden options are internal and must not appear on the CLI.
        if opt.hidden:
            continue
        argument_from_option(parser, component, name, prefix=prefix)
def host_keys ( keydir = None , private = True , certs = True ) :
'''Return the minion ' s host keys
CLI Example :
. . code - block : : bash
salt ' * ' ssh . host _ keys
salt ' * ' ssh . host _ keys keydir = / etc / ssh
salt ' * ' ssh . host _ keys keydir = / etc / ssh private = False
salt ' * ' ssh . h... | # TODO : support parsing sshd _ config for the key directory
if not keydir :
if __grains__ [ 'kernel' ] == 'Linux' :
keydir = '/etc/ssh'
else : # If keydir is None , os . listdir ( ) will blow up
raise SaltInvocationError ( 'ssh.host_keys: Please specify a keydir' )
keys = { }
fnre = re . compil... |
def _verify_params(self):
    """Verifies the parameters don't use any reserved parameter.

    Raises:
        ValueError: If a reserved parameter is used.
    """
    clashes = self._RESERVED_PARAMS & set(self.extra_params)
    if clashes:
        raise ValueError("Using a reserved parameter", clashes)
def raw_urlsafe_b64encode(b):
    '''Base64 encode using URL-safe alphabet with padding removed.

    @param b bytes (or value coercible via to_bytes) to encode
    @return URL-safe base64-encoded bytes with trailing '=' padding stripped
    '''
    b = to_bytes(b)
    b = base64.urlsafe_b64encode(b)
    # strip the '=' padding so the result is safe for raw URL use
    b = b.rstrip(b'=')
    return b
def _generate_examples ( self , split_subsets , extraction_map ) :
"""Returns the examples in the raw ( text ) form .""" | source , _ = self . builder_config . language_pair
def _get_local_paths ( ds , extract_dirs ) :
rel_paths = ds . get_path ( source )
if len ( extract_dirs ) == 1 :
extract_dirs = extract_dirs * len ( rel_paths )
return [ os . path . join ( ex_dir , rel_path ) if rel_path else ex_dir for ex_dir , rel... |
def browse ( self ) :
"""Utility to browse through the records in the warc file .
This returns an iterator over ( record , offset , size ) for each record in
the file . If the file is gzip compressed , the offset and size will
corresponds to the compressed file .
The payload of each record is limited to 1MB... | offset = 0
for record in self . reader : # Just read the first 1MB of the payload .
# This will make sure memory consuption is under control and it
# is possible to look at the first MB of the payload , which is
# typically sufficient to read http headers in the payload .
record . payload = StringIO ( record . payl... |
def scan_to_best_match ( fname , motifs , ncpus = None , genome = None , score = False ) :
"""Scan a FASTA file with motifs .
Scan a FASTA file and return a dictionary with the best match per motif .
Parameters
fname : str
Filename of a sequence file in FASTA format .
motifs : list
List of motif instanc... | # Initialize scanner
s = Scanner ( ncpus = ncpus )
s . set_motifs ( motifs )
s . set_threshold ( threshold = 0.0 )
if genome :
s . set_genome ( genome )
if isinstance ( motifs , six . string_types ) :
motifs = read_motifs ( motifs )
logger . debug ( "scanning %s..." , fname )
result = dict ( [ ( m . id , [ ] ) ... |
def _parse(name):
    """Return dict of parts forming `name`.

    Raise `ValueError` if string `name` cannot be correctly parsed.

    The default implementation uses
    `NodeNamingPolicy._NODE_NAME_RE` to parse the name back into
    constituent parts.  This is ideally the inverse of :meth:`_format`.
    """
    match = NodeNamingPolicy._NODE_NAME_RE.match(name)
    if match is None:
        raise ValueError("Cannot parse node name `{name}`".format(name=name))
    return match.groupdict()
def publish_and_get_event ( self , resource ) :
"""Publish and get the event from base station .""" | l_subscribed = False
this_event = None
if not self . __subscribed :
self . _get_event_stream ( )
self . _subscribe_myself ( )
l_subscribed = True
status = self . publish ( action = 'get' , resource = resource , mode = None , publish_response = False )
if status == 'success' :
i = 0
while not this_ev... |
def check_migrations_applied ( migrate ) :
"""A built - in check to see if all migrations have been applied correctly .
It ' s automatically added to the list of Dockerflow checks if a
` flask _ migrate . Migrate < https : / / flask - migrate . readthedocs . io / > ` _ object
is passed to the : class : ` ~ do... | errors = [ ]
from alembic . migration import MigrationContext
from alembic . script import ScriptDirectory
from sqlalchemy . exc import DBAPIError , SQLAlchemyError
# pass in Migrate . directory here explicitly to be compatible with
# older versions of Flask - Migrate that required the directory to be passed
config = m... |
def serve_protected_file ( request , path ) :
"""Serve protected files to authenticated users with read permissions .""" | path = path . rstrip ( '/' )
try :
file_obj = File . objects . get ( file = path )
except File . DoesNotExist :
raise Http404 ( 'File not found %s' % path )
if not file_obj . has_read_permission ( request ) :
if settings . DEBUG :
raise PermissionDenied
else :
raise Http404 ( 'File not f... |
def search_course(self, xqdm, kcdm=None, kcmc=None):
    """Course search (课程查询).

    @structure [{'任课教师': str, '课程名称': str, '教学班号': str, '课程代码': str, '班级容量': int}]
    (keys are the teacher, course name, class number, course code and
    class capacity of each matching course — left untranslated because
    they are the literal keys of the returned dicts)

    :param xqdm: term code (学期代码)
    :param kcdm: course code (课程代码), optional
    :param kcmc: course name (课程名称), optional
    :return: result of dispatching a SearchCourse request via ``self.query``
    """
    return self.query(SearchCourse(xqdm, kcdm, kcmc))
async def connect_async ( self , loop = None , timeout = None ) :
"""Set up async connection on specified event loop or
on default event loop .""" | if self . deferred :
raise Exception ( "Error, database not properly initialized " "before opening connection" )
if self . _async_conn :
return
elif self . _async_wait :
await self . _async_wait
else :
self . _loop = loop
self . _async_wait = asyncio . Future ( loop = self . _loop )
conn = self ... |
def import_parallel_gateway_to_graph(diagram_graph, process_id, process_attributes, element):
    """Adds to graph the new element that represents BPMN parallel gateway.

    Parallel gateway doesn't have additional attributes, so this simply
    delegates to the shared gateway-import routine; the separate method
    exists to improve code readability.

    :param diagram_graph: diagram graph the gateway node is added to
        (presumably a NetworkX graph — original description truncated)
    :param process_id: id of the process that owns the element
    :param process_attributes: attributes of the enclosing process
    :param element: XML element representing the parallel gateway
    """
    BpmnDiagramGraphImport.import_gateway_to_graph(diagram_graph, process_id, process_attributes, element)
async def count ( query , clear_limit = False ) :
"""Perform * COUNT * aggregated query asynchronously .
: return : number of objects in ` ` select ( ) ` ` query""" | clone = query . clone ( )
if query . _distinct or query . _group_by or query . _limit or query . _offset :
if clear_limit :
clone . _limit = clone . _offset = None
sql , params = clone . sql ( )
wrapped = 'SELECT COUNT(1) FROM (%s) AS wrapped_select' % sql
raw = query . model . raw ( wrapped , *... |
def _parse_button ( self , keypad , component_xml ) :
"""Parses a button device that part of a keypad .""" | button_xml = component_xml . find ( 'Button' )
name = button_xml . get ( 'Engraving' )
button_type = button_xml . get ( 'ButtonType' )
direction = button_xml . get ( 'Direction' )
# Hybrid keypads have dimmer buttons which have no engravings .
if button_type == 'SingleSceneRaiseLower' :
name = 'Dimmer ' + direction... |
def define(self, id, schema):
    """Register *schema* under *id* and return a reference to it.

    :param id: id of the schema.
    :param schema: the schema as a dict or a
        :class:`schemabuilder.primitives.Generic`
    :return: reference to schema.
    :rtype: :class:`schemabuilder.schema.Ref`
    """
    self.definitions[id] = schema
    # Invalidate any cached compiled schema so it is rebuilt lazily.
    self._schema = None
    return self.ref(id)
def global_set(self, key, value):
    """Set ``key`` to ``value`` globally (not at any particular branch or
    revision).

    Tries an insert first; if the key already exists (integrity error),
    falls back to an update.
    """
    packed_key = self.pack(key)
    packed_value = self.pack(value)
    try:
        return self.sql('global_insert', packed_key, packed_value)
    except IntegrityError:
        return self.sql('global_update', packed_value, packed_key)
def version_binary ( self ) :
'''Return version number which is stored in binary format .
Returns :
str : < major 0-255 > . < minior 0-255 > . < build 0-65535 > or None if not found''' | # Under MSI ' Version ' is a ' REG _ DWORD ' which then sets other registry
# values like DisplayVersion to x . x . x to the same value .
# However not everyone plays by the rules , so we need to check first .
# version _ binary _ data will be None if the reg value does not exist .
# Some installs set ' Version ' to RE... |
def castroData_from_ipix(self, ipix, colwise=False):
    """Build a CastroData object for a particular pixel.

    :param ipix: pixel index into the stored norm/NLL value arrays
    :param colwise: if True, *ipix* is a column-wise index and is first
        converted to the row-wise convention via the TS map.
    """
    if colwise:
        ipix = self._tsmap.ipix_swap_axes(ipix, colwise)
    return CastroData(self._norm_vals[ipix], self._nll_vals[ipix],
                      self._refSpec, self._norm_type)
def searchtop(self, n=10):
    """Return the top *n* best results (or possibly fewer if not enough are found)."""
    best = PriorityQueue([], lambda solution: solution.score, self.minimize,
                         length=n, blockworse=False, blockequal=False,
                         duplicates=False)
    # The bounded priority queue keeps only the n best-scoring solutions.
    for candidate in self:
        best.append(candidate)
    return best
def upload_file(self, api_token, file_path, **kwargs):
    """Upload a file suitable to be passed as a file_attachment.

    :param api_token: The user's login api_token.
    :type api_token: str
    :param file_path: The path of the file to be uploaded.
    :type file_path: str
    :return: The result of the ``upload_file`` POST request.
    """
    params = {
        'token': api_token,
        'file_name': os.path.basename(file_path),
    }
    with open(file_path, 'rb') as attachment:
        return self._post('upload_file', params, {'file': attachment}, **kwargs)
def handleTickGeneric ( self , msg ) :
"""holds latest tick bid / ask / last price""" | df2use = self . marketData
if self . contracts [ msg . tickerId ] . m_secType in ( "OPT" , "FOP" ) :
df2use = self . optionsData
# create tick holder for ticker
if msg . tickerId not in df2use . keys ( ) :
df2use [ msg . tickerId ] = df2use [ 0 ] . copy ( )
if msg . tickType == dataTypes [ "FIELD_OPTION_IMPLIED... |
def makeB64UrlSafe(b64str):
    """Make a base64 string URL safe.

    Accepts ``str`` or ``bytes``; a ``str`` is UTF-8 encoded first.
    Replaces ``+`` with ``-`` and ``/`` with ``_`` per RFC 4648, sec. 5.

    :param b64str: standard base64-encoded string or bytes
    :return: URL-safe base64 as bytes
    """
    # `six.text_type` was only a py2/3 shim for `str`; plain `str` is
    # equivalent on Python 3 and drops the third-party dependency.
    if isinstance(b64str, str):
        b64str = b64str.encode()
    # see RFC 4648, sec. 5
    return b64str.replace(b'+', b'-').replace(b'/', b'_')
def key_func ( * keys , ** kwargs ) :
"""Creates a " key function " based on given keys .
Resulting function will perform lookup using specified keys , in order ,
on the object passed to it as an argument .
For example , ` ` key _ func ( ' a ' , ' b ' ) ( foo ) ` ` is equivalent to ` ` foo [ ' a ' ] [ ' b ' ]... | ensure_argcount ( keys , min_ = 1 )
ensure_keyword_args ( kwargs , optional = ( 'default' , ) )
keys = list ( map ( ensure_string , keys ) )
if 'default' in kwargs :
default = kwargs [ 'default' ]
def getitems ( obj ) :
for key in keys :
try :
obj = obj [ key ]
ex... |
def update_picks ( self , games = None , points = None ) :
'''games can be dict of { game . id : winner _ id } for all picked games to update''' | if games :
game_dict = { g . id : g for g in self . gameset . games . filter ( id__in = games ) }
game_picks = { pick . game . id : pick for pick in self . gamepicks . filter ( game__id__in = games ) }
for key , winner in games . items ( ) :
game = game_dict [ key ]
if not game . has_started... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.