signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def setWriteToShell ( self , writeToShell = True ) :
"""connect sysout to the qtSignal""" | if writeToShell and not self . _connected :
self . message . connect ( self . stdW )
self . _connected = True
elif not writeToShell and self . _connected :
try :
self . message . disconnect ( self . stdW )
except TypeError :
pass
# was not connected
self . _connected = False |
def get_sensor_consumption ( self , service_location_id , sensor_id , start , end , aggregation ) :
"""Request consumption for a given sensor in a given service location
Parameters
service _ location _ id : int
sensor _ id : int
start : int | dt . datetime | pd . Timestamp
end : int | dt . datetime | pd .... | url = urljoin ( URLS [ 'servicelocation' ] , service_location_id , "sensor" , sensor_id , "consumption" )
return self . _get_consumption ( url = url , start = start , end = end , aggregation = aggregation ) |
def perform ( self , cmd , msg = '' , extra_headers = None ) :
"""Perform the call""" | tries = 0
while 1 :
conn = None
try :
conn = self . get_connection ( )
if hasattr ( msg , 'read' ) and hasattr ( msg , 'fileno' ) :
msg_length = str ( os . fstat ( msg . fileno ( ) ) . st_size )
elif hasattr ( msg , 'read' ) :
msg . seek ( 0 , 2 )
msg_... |
def time_window_cutoff ( sw_time , time_cutoff ) :
"""Allows for cutting the declustering time window at a specific time , outside
of which an event of any magnitude is no longer identified as a cluster""" | sw_time = np . array ( [ ( time_cutoff / DAYS ) if x > ( time_cutoff / DAYS ) else x for x in sw_time ] )
return ( sw_time ) |
def plot_labels ( labels , lattice = None , coords_are_cartesian = False , ax = None , ** kwargs ) :
"""Adds labels to a matplotlib Axes
Args :
labels : dict containing the label as a key and the coordinates as value .
lattice : Lattice object used to convert from reciprocal to cartesian coordinates
coords ... | ax , fig , plt = get_ax3d_fig_plt ( ax )
if "color" not in kwargs :
kwargs [ "color" ] = "b"
if "size" not in kwargs :
kwargs [ "size" ] = 25
for k , coords in labels . items ( ) :
label = k
if k . startswith ( "\\" ) or k . find ( "_" ) != - 1 :
label = "$" + k + "$"
off = 0.01
if coord... |
def _erase_buffer ( self , output_buffer ) :
"""Erase readings in the specified buffer to make space .""" | erase_size = self . _model . get ( u'buffer_erase_size' )
buffer_type = u'storage'
if output_buffer :
buffer_type = u'streaming'
old_readings = self . _engine . popn ( buffer_type , erase_size )
# Now go through all of our walkers that could match and
# update their availability counts and data buffer pointers
for ... |
def reinverted ( n , r ) :
"""Integer with reversed and inverted bits of n assuming bit length r .
> > > reinverted ( 1 , 6)
31
> > > [ reinverted ( x , 6 ) for x in [ 7 , 11 , 13 , 14 , 19 , 21 , 22 , 25 , 26 , 28 ] ]
[7 , 11 , 19 , 35 , 13 , 21 , 37 , 25 , 41 , 49]""" | result = 0
r = 1 << ( r - 1 )
while n :
if not n & 1 :
result |= r
r >>= 1
n >>= 1
if r :
result |= ( r << 1 ) - 1
return result |
def check_install ( ) :
"""Try to detect the two most common installation errors :
1 . Installing on macOS using a Homebrew version of Python
2 . Installing on Linux using Python 2 when GDB is linked with Python 3""" | if platform . system ( ) == 'Darwin' and sys . executable != '/usr/bin/python' :
print ( "*" * 79 )
print ( textwrap . fill ( "WARNING: You are not using the version of Python included with " "macOS. If you intend to use Voltron with the LLDB included " "with Xcode, or GDB installed with Homebrew, it will not w... |
def _create_model ( self , X , Y ) :
"""Creates the model given some input data X and Y .""" | # - - - define kernel
self . input_dim = X . shape [ 1 ]
if self . kernel is None :
kern = GPy . kern . Matern52 ( self . input_dim , variance = 1. , ARD = self . ARD )
# + GPy . kern . Bias ( self . input _ dim )
else :
kern = self . kernel
self . kernel = None
# - - - define model
noise_var = Y . var ... |
def _check_cb ( cb_ ) :
'''If the callback is None or is not callable , return a lambda that returns
the value passed .''' | if cb_ is not None :
if hasattr ( cb_ , '__call__' ) :
return cb_
else :
log . error ( 'log_callback is not callable, ignoring' )
return lambda x : x |
def read_separated_lines ( path , separator = ' ' , max_columns = - 1 , keep_empty = False ) :
"""Reads a text file where each line represents a record with some separated columns .
Parameters :
path ( str ) : Path to the file to read .
separator ( str ) : Separator that is used to split the columns .
max _... | gen = read_separated_lines_generator ( path , separator , max_columns , keep_empty = keep_empty )
return list ( gen ) |
def direction ( layer , vec , batch = None , cossim_pow = 0 ) :
"""Visualize a direction""" | if batch is None :
vec = vec [ None , None , None ]
return lambda T : _dot_cossim ( T ( layer ) , vec )
else :
vec = vec [ None , None ]
return lambda T : _dot_cossim ( T ( layer ) [ batch ] , vec ) |
def power_cycle_vm ( virtual_machine , action = 'on' ) :
'''Powers on / off a virtual machine specified by it ' s name .
virtual _ machine
vim . VirtualMachine object to power on / off virtual machine
action
Operation option to power on / off the machine''' | if action == 'on' :
try :
task = virtual_machine . PowerOn ( )
task_name = 'power on'
except vim . fault . NoPermission as exc :
log . exception ( exc )
raise salt . exceptions . VMwareApiError ( 'Not enough permissions. Required privilege: ' '{}' . format ( exc . privilegeId ) )... |
def _clear_expired_zones ( self ) :
"""Update zone status for all expired zones .""" | zones = [ ]
for z in list ( self . _zones . keys ( ) ) :
zones += [ z ]
for z in zones :
if self . _zones [ z ] . status != Zone . CLEAR and self . _zone_expired ( z ) :
self . _update_zone ( z , Zone . CLEAR ) |
def draw ( self , ** kwargs ) :
"""Draws the bar plot of the ranking array of features .""" | if self . orientation_ == 'h' : # Make the plot
self . ax . barh ( np . arange ( len ( self . ranks_ ) ) , self . ranks_ , color = 'b' )
# Add ticks and tick labels
self . ax . set_yticks ( np . arange ( len ( self . ranks_ ) ) )
if self . show_feature_names_ :
self . ax . set_yticklabels ( self... |
def events_filter ( self , topics : List [ str ] = None , from_block : BlockSpecification = None , to_block : BlockSpecification = None , ) -> StatelessFilter :
"""Install a new filter for an array of topics emitted by the contract .
Args :
topics : A list of event ids to filter for . Can also be None ,
in wh... | return self . client . new_filter ( self . address , topics = topics , from_block = from_block , to_block = to_block , ) |
def covar ( X , remove_mean = False , modify_data = False , weights = None , sparse_mode = 'auto' , sparse_tol = 0.0 ) :
"""Computes the covariance matrix of X
Computes
. . math :
C _ XX & = & X ^ \t op X
while exploiting zero or constant columns in the data matrix .
WARNING : Directly use moments _ XX if... | w , s , M = moments_XX ( X , remove_mean = remove_mean , weights = weights , modify_data = modify_data , sparse_mode = sparse_mode , sparse_tol = sparse_tol )
return M / float ( w ) |
def edges ( self , nodes = None ) :
"""Returns a ` ` tuple ` ` of all edges in the ` ` DictGraph ` ` an edge is a pair
of * * node objects * * .
Arguments :
- nodes ( iterable ) [ default : ` ` None ` ` ] iterable of * * node objects * * if
specified the edges will be limited to those outgoing from one of
... | # If a Node has been directly updated ( _ _ not _ _ recommended )
# then the Graph will not know the added nodes and therefore will
# miss half of their edges .
edges = set ( )
for node in ( nodes or self . iterkeys ( ) ) :
ends = self [ node ] . nodes ( )
edges . update ( [ ( node , end ) for end in ends ] )
r... |
def destroy ( self ) :
"""Destoys the app . Do be careful .""" | r = self . _h . _http_resource ( method = 'DELETE' , resource = ( 'apps' , self . name ) )
return r . ok |
def type_profile2 ( sequence , TypedSequence = None ) :
"""similar to depth _ profile but reports types
Args :
sequence ( ? ) :
compress _ homogenous ( bool ) : ( default = True )
Returns :
str : level _ type _ str
CommandLine :
python - m utool . util _ list - - exec - type _ profile2
Example :
>... | if TypedSequence is None :
TypedSequence = type_sequence_factory ( )
# For a pure bottom level list return the length
# if not any ( map ( util _ type . is _ listlike , sequence ) ) or ( isinstance ( sequence , np . ndarray ) and sequence . dtype ! = object ) :
if not util_type . is_listlike ( sequence ) or ( isins... |
def _filter_execs ( self , isSubroutine ) :
"""Filters the executables in the dictionary by their type .""" | result = { }
for key in self . executables :
if ( isinstance ( self . executables [ key ] , Subroutine ) and isSubroutine ) or ( isinstance ( self . executables [ key ] , Function ) and not isSubroutine ) :
result [ key ] = self . executables [ key ]
return result |
def clean ( tf_matrix , tf_matrix_gene_names , target_gene_name ) :
""": param tf _ matrix : numpy array . The full transcription factor matrix .
: param tf _ matrix _ gene _ names : the full list of transcription factor names , corresponding to the tf _ matrix columns .
: param target _ gene _ name : the targe... | if target_gene_name not in tf_matrix_gene_names :
clean_tf_matrix = tf_matrix
else :
clean_tf_matrix = np . delete ( tf_matrix , tf_matrix_gene_names . index ( target_gene_name ) , 1 )
clean_tf_names = [ tf for tf in tf_matrix_gene_names if tf != target_gene_name ]
assert clean_tf_matrix . shape [ 1 ] == len ( ... |
def _matches_prop ( self , obj , prop_name , prop_match ) :
"""Return a boolean indicating whether a resource object matches with
a single property against a property match value .
This is used for client - side filtering .
Depending on the specified property , this method retrieves the resource
properties ... | if isinstance ( prop_match , ( list , tuple ) ) : # List items are logically ORed , so one matching item suffices .
for pm in prop_match :
if self . _matches_prop ( obj , prop_name , pm ) :
return True
else : # Some lists of resources do not have all properties , for example
# Hipersocket adapte... |
def _calculate_influence ( self , influence_lambda ) :
"""Calculate the ranking influence .""" | return np . exp ( - np . arange ( self . num_neurons ) / influence_lambda ) [ : , None ] |
def update_args ( self , override_args ) :
"""Update the argument used to invoke the application
Note that this will also update the dictionary of input
and output files .
Parameters
override _ args : dict
dictionary passed to the links""" | self . args = extract_arguments ( override_args , self . args )
self . _map_arguments ( self . args )
scratch_dir = self . args . get ( 'scratch' , None )
if is_not_null ( scratch_dir ) :
self . _file_stage = FileStageManager ( scratch_dir , '.' )
for link in self . _links . values ( ) :
link . _set_file_stage ... |
def _assemble_conversion ( self , stmt ) :
"""Example : p ( HGNC : HK1 ) = > rxn ( reactants ( a ( CHEBI : " CHEBI : 17634 " ) ) ,
products ( a ( CHEBI : " CHEBI : 4170 " ) ) )""" | pybel_lists = ( [ ] , [ ] )
for pybel_list , agent_list in zip ( pybel_lists , ( stmt . obj_from , stmt . obj_to ) ) :
for agent in agent_list :
node = _get_agent_grounding ( agent )
# TODO check for missing grounding ?
pybel_list . append ( node )
rxn_node_data = reaction ( reactants = pybe... |
def default_args ( self ) :
"""Parse args and return default args .""" | if self . _default_args is None :
self . _default_args , unknown = self . parser . parse_known_args ( )
# pylint : disable = W0612
# reinitialize logger with new log level and api settings
self . tcex . _logger ( )
if self . _default_args . tc_aot_enabled : # block for AOT message and get params
... |
def readShiftFile ( self , filename ) :
"""Reads a shift file from disk and populates a dictionary .""" | order = [ ]
fshift = open ( filename , 'r' )
flines = fshift . readlines ( )
fshift . close ( )
common = [ f . strip ( '#' ) . strip ( ) for f in flines if f . startswith ( '#' ) ]
c = [ line . split ( ': ' ) for line in common ]
# Remove any line comments in the shift file - lines starting with ' # '
# but not part of... |
def draw_objects ( self , objects , bounds , img ) :
'''draw objects on the image''' | keys = objects . keys ( )
keys . sort ( )
for k in keys :
obj = objects [ k ]
bounds2 = obj . bounds ( )
if bounds2 is None or mp_util . bounds_overlap ( bounds , bounds2 ) :
obj . draw ( img , self . pixmapper , bounds ) |
def rename_datastore ( datastore_ref , new_datastore_name ) :
'''Renames a datastore
datastore _ ref
vim . Datastore reference to the datastore object to be changed
new _ datastore _ name
New datastore name''' | ds_name = get_managed_object_name ( datastore_ref )
log . trace ( "Renaming datastore '%s' to '%s'" , ds_name , new_datastore_name )
try :
datastore_ref . RenameDatastore ( new_datastore_name )
except vim . fault . NoPermission as exc :
log . exception ( exc )
raise salt . exceptions . VMwareApiError ( 'Not... |
def lpushx ( self , key , * values ) :
"""Insert values at the head of an existing list .
: param key : The list ' s key
: type key : : class : ` str ` , : class : ` bytes `
: param values : One or more positional arguments to insert at the
beginning of the list . Each value is inserted at the beginning
o... | return self . _execute ( [ b'LPUSHX' , key ] + list ( values ) ) |
def values ( ) :
"""Get the full current set of B3 values .
: return : A dict containing the keys " X - B3 - TraceId " , " X - B3 - ParentSpanId " , " X - B3 - SpanId " , " X - B3 - Sampled " and
" X - B3 - Flags " for the current span or subspan . NB some of the values are likely be None , but
all keys will ... | result = { }
try : # Check if there ' s a sub - span in progress , otherwise use the main span :
span = g . get ( "subspan" ) if "subspan" in g else g
for header in b3_headers :
result [ header ] = span . get ( header )
except RuntimeError : # We ' re probably working outside the Application Context at ... |
def createHiddenFolder ( self ) -> 'File' :
"""Create Hidden Folder
Create a hidden folder . Raise exception if auto delete isn ' t True .
@ return : Created folder .""" | if not self . _autoDelete :
raise Exception ( "Hidden folders can only be created within" " an autoDelete directory" )
return tempfile . mkdtemp ( dir = self . _path , prefix = "." ) |
def get_filtered_graph ( self , relations = None , prefix = None ) :
"""Returns a networkx graph for the whole ontology , for a subset of relations
Only implemented for eager methods .
Implementation notes : currently this is not cached
Arguments
- relations : list
list of object property IDs , e . g . su... | # trigger synonym cache
self . all_synonyms ( )
self . all_obsoletes ( )
# default method - wrap get _ graph
srcg = self . get_graph ( )
if prefix is not None :
srcg = srcg . subgraph ( [ n for n in srcg . nodes ( ) if n . startswith ( prefix + ":" ) ] )
if relations is None :
logger . info ( "No filtering on "... |
def unpack ( stream , ** kwargs ) :
'''. . versionadded : : 2018.3.4
Wraps msgpack . unpack .
By default , this function uses the msgpack module and falls back to
msgpack _ pure , if the msgpack is not available . You can pass an alternate
msgpack module using the _ msgpack _ module argument .''' | msgpack_module = kwargs . pop ( '_msgpack_module' , msgpack )
return msgpack_module . unpack ( stream , ** kwargs ) |
def cmd_sync ( self , low , timeout = None , full_return = False ) :
'''Execute a runner function synchronously ; eauth is respected
This function requires that : conf _ master : ` external _ auth ` is configured
and the user is authorized to execute runner functions : ( ` ` @ runner ` ` ) .
. . code - block ... | event = salt . utils . event . get_master_event ( self . opts , self . opts [ 'sock_dir' ] , listen = True )
job = self . master_call ( ** low )
ret_tag = salt . utils . event . tagify ( 'ret' , base = job [ 'tag' ] )
if timeout is None :
timeout = self . opts . get ( 'rest_timeout' , 300 )
ret = event . get_event ... |
def shrink ( self ) :
"""Calculate the Constant - Correlation covariance matrix .
: return : shrunk sample covariance matrix
: rtype : np . ndarray""" | x = np . nan_to_num ( self . X . values )
# de - mean returns
t , n = np . shape ( x )
meanx = x . mean ( axis = 0 )
x = x - np . tile ( meanx , ( t , 1 ) )
# compute sample covariance matrix
sample = ( 1.0 / t ) * np . dot ( x . T , x )
# compute prior
var = np . diag ( sample ) . reshape ( - 1 , 1 )
sqrtvar = np . sq... |
def get_list_of_paths ( self ) :
"""return a list of unique paths in the file list""" | all_paths = [ ]
for p in self . fl_metadata :
try :
all_paths . append ( p [ 'path' ] )
except :
try :
print ( 'cls_filelist - no key path, ignoring folder ' + str ( p ) )
except :
print ( 'cls_filelist - no key path, ignoring odd character folder' )
return list (... |
def find_types ( observatory , match = None , trend = None , connection = None , ** connection_kw ) :
"""Find the available data types for a given observatory .
See also
gwdatafind . http . HTTPConnection . find _ types
FflConnection . find _ types
for details on the underlying method ( s )""" | return sorted ( connection . find_types ( observatory , match = match ) , key = lambda x : _type_priority ( observatory , x , trend = trend ) ) |
def RunJob ( self , job ) :
"""Does the actual work of the Cron , if the job is due to run .
Args :
job : The cronjob rdfvalue that should be run . Must be leased .
Returns :
A boolean indicating if this cron job was started or not . False may
be returned when the threadpool is already full .
Raises :
... | if not job . leased_until :
raise LockError ( "CronJob must be leased for Run() to be called." )
if job . leased_until < rdfvalue . RDFDatetime . Now ( ) :
raise LockError ( "CronJob lease expired for %s." % job . cron_job_id )
logging . info ( "Starting cron job: %s" , job . cron_job_id )
if job . args . actio... |
def prerequisite_check ( ) :
"""Check prerequisites of the framework , including Python version , installation of
modules , etc .
Returns :
Optional [ str ] : If the check is not passed , return error message regarding
failed test case . None is returned otherwise .""" | # Check Python version
if sys . version_info < ( 3 , 6 ) :
version_str = "%s.%s.%s" % sys . version_info [ : 3 ]
# TRANSLATORS : This word is used as a part of search query suggested to users ,
# it may appears in context like " Ubuntu 16.04 install Python 3.7"
search_url = build_search_query ( _ ( "ins... |
def list_syntax ( self ) :
'''Prints a list of available syntax for the current paste service''' | syntax_list = [ 'Available syntax for %s:' % ( self ) ]
logging . info ( syntax_list [ 0 ] )
for key in self . SYNTAX_DICT . keys ( ) :
syntax = '\t%-20s%-30s' % ( key , self . SYNTAX_DICT [ key ] )
logging . info ( syntax )
syntax_list . append ( syntax )
return syntax_list |
def __construct_from_components ( self , ns_uri , prefix = None , schema_location = None ) :
"""Initialize this instance from a namespace URI , and optional
prefix and schema location URI .""" | assert ns_uri
# other fields are optional
self . uri = ns_uri
self . schema_location = schema_location or None
self . prefixes = OrderedSet ( )
if prefix :
self . prefixes . add ( prefix )
self . preferred_prefix = prefix or None |
def save ( self , out , kind = None , ** kw ) :
"""Serializes the QR Code in one of the supported formats .
The serialization format depends on the filename extension .
* * Common keywords * *
Name Description
scale Integer or float indicating the size of a single module .
Default : 1 . The interpretation... | writers . save ( self . matrix , self . _version , out , kind , ** kw ) |
def velocity_dispersion_numerical ( self , kwargs_lens , kwargs_lens_light , kwargs_anisotropy , kwargs_aperture , psf_fwhm , aperture_type , anisotropy_model , r_eff = None , kwargs_numerics = { } , MGE_light = False , MGE_mass = False , lens_model_kinematics_bool = None , light_model_kinematics_bool = None , Hernquis... | kwargs_cosmo = { 'D_d' : self . lensCosmo . D_d , 'D_s' : self . lensCosmo . D_s , 'D_ds' : self . lensCosmo . D_ds }
mass_profile_list = [ ]
kwargs_profile = [ ]
if lens_model_kinematics_bool is None :
lens_model_kinematics_bool = [ True ] * len ( kwargs_lens )
for i , lens_model in enumerate ( self . kwargs_optio... |
def login_required ( obj ) :
"""Requires that the user be logged in order to gain access to the resource
at the specified the URI .""" | decorator = request_passes_test ( lambda r , * args , ** kwargs : r . user . is_authenticated ( ) )
return wrap_object ( obj , decorator ) |
def pvt ( bars ) :
"""Price Volume Trend""" | trend = ( ( bars [ 'close' ] - bars [ 'close' ] . shift ( 1 ) ) / bars [ 'close' ] . shift ( 1 ) ) * bars [ 'volume' ]
return trend . cumsum ( ) |
def get_ngram_counts ( self ) :
'''Returns a list of n - gram counts
Array of classes counts and last item is for corpus''' | ngram_counts = { 'classes' : [ ] , 'corpus' : 0 }
doc_ids = self . term_count_n . keys ( )
doc_ids . sort ( )
for doc_id in doc_ids :
print self . term_count_n [ doc_id ]
class_ngrams = len ( self . term_count_n [ doc_id ] [ 'ngrams' ] )
ngram_counts [ 'classes' ] . append ( class_ngrams )
corpus_ngrams = l... |
def restore_descriptor ( self , table_name , columns , constraints , autoincrement_column = None ) :
"""Restore descriptor from SQL""" | # Fields
fields = [ ]
for column in columns :
if column . name == autoincrement_column :
continue
field_type = self . restore_type ( column . type )
field = { 'name' : column . name , 'type' : field_type }
if not column . nullable :
field [ 'constraints' ] = { 'required' : True }
fie... |
def deleteLink ( self , linkdict ) :
"""Delete link if PDF""" | CheckParent ( self )
val = _fitz . Page_deleteLink ( self , linkdict )
if linkdict [ "xref" ] == 0 :
return
linkid = linkdict [ "id" ]
try :
linkobj = self . _annot_refs [ linkid ]
linkobj . _erase ( )
except :
pass
return val |
def create_api_method_response ( restApiId , resourcePath , httpMethod , statusCode , responseParameters = None , responseModels = None , region = None , key = None , keyid = None , profile = None ) :
'''Create API method response for a method on a given resource in the given API
CLI Example :
. . code - block ... | try :
resource = describe_api_resource ( restApiId , resourcePath , region = region , key = key , keyid = keyid , profile = profile ) . get ( 'resource' )
if resource :
responseParameters = dict ( ) if responseParameters is None else responseParameters
responseModels = dict ( ) if responseModels... |
async def _write_frame_awaiting_response ( self , waiter_id , frame , request , no_wait , check_open = True , drain = True ) :
'''Write a frame and set a waiter for
the response ( unless no _ wait is set )''' | if no_wait :
await self . _write_frame ( frame , request , check_open = check_open , drain = drain )
return None
f = self . _set_waiter ( waiter_id )
try :
await self . _write_frame ( frame , request , check_open = check_open , drain = drain )
except Exception :
self . _get_waiter ( waiter_id )
f . ... |
def logical_intf_helper ( interface ) :
"""Logical Interface finder by name . Create if it doesn ' t exist .
This is useful when adding logical interfaces to for inline
or capture interfaces .
: param interface : logical interface name
: return str href : href of logical interface""" | if interface is None :
return LogicalInterface . get_or_create ( name = 'default_eth' ) . href
elif isinstance ( interface , LogicalInterface ) :
return interface . href
elif interface . startswith ( 'http' ) :
return interface
return LogicalInterface . get_or_create ( name = interface ) . href |
def c32address ( version , hash160hex ) :
"""> > > c32address ( 22 , ' a46ff88886c2ef9762d970b4d2c63678835bd39d ' )
' SP2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKNRV9EJ7'
> > > c32address ( 0 , ' 00000 ' )
' S000002AA028H '
> > > c32address ( 31 , ' 000001 ' )
' SZ000005HZ3DVN '
> > > c32address ( 20 , ' 1000001 ... | if not re . match ( r'^[0-9a-fA-F]{40}$' , hash160hex ) :
raise ValueError ( 'Invalid argument: not a hash160 hex string' )
c32string = c32checkEncode ( version , hash160hex )
return 'S{}' . format ( c32string ) |
def delNode ( self , address ) :
"""Just send it along if requested , should be able to delete the node even if it isn ' t
in our config anywhere . Usually used for normalization .""" | if address in self . nodes :
del self . nodes [ address ]
self . poly . delNode ( address ) |
def set_crc ( self ) :
"""Set Userdata [ 13 ] and Userdata [ 14 ] to the CRC value .""" | data = self . bytes [ 6 : 20 ]
crc = int ( 0 )
for b in data : # pylint : disable = unused - variable
for bit in range ( 0 , 8 ) :
fb = b & 0x01
fb = fb ^ 0x01 if ( crc & 0x8000 ) else fb
fb = fb ^ 0x01 if ( crc & 0x4000 ) else fb
fb = fb ^ 0x01 if ( crc & 0x1000 ) else fb
fb... |
def disconnectNetToMs ( Facility_presence = 0 , ProgressIndicator_presence = 0 , UserUser_presence = 0 , AllowedActions_presence = 0 ) :
"""DISCONNECT Section 9.3.7.1""" | a = TpPd ( pd = 0x3 )
b = MessageType ( mesType = 0x25 )
# 00100101
c = Cause ( )
packet = a / b / c
if Facility_presence is 1 :
d = FacilityHdr ( ieiF = 0x1C , eightBitF = 0x0 )
packet = packet / d
if ProgressIndicator_presence is 1 :
e = ProgressIndicatorHdr ( ieiPI = 0x1E , eightBitPI = 0x0 )
packet ... |
def parse_interval ( interval ) :
"""Attepmt to parse an ISO8601 formatted ` ` interval ` ` .
Returns a tuple of ` ` datetime . datetime ` ` and ` ` datetime . timedelta ` `
objects , order dependent on ` ` interval ` ` .""" | a , b = str ( interval ) . upper ( ) . strip ( ) . split ( '/' )
if a [ 0 ] is 'P' and b [ 0 ] is 'P' :
raise ParseError ( )
if a [ 0 ] != 'P' and b [ 0 ] != 'P' :
return parse_date ( a ) , parse_date ( b )
if a [ 0 ] is 'P' :
a = parse_duration ( a )
else :
a = parse_date ( a )
if b [ 0 ] is 'P' :
... |
def view_list ( self ) :
'''return a list of polygon indexes lists for the waypoints''' | done = set ( )
ret = [ ]
while len ( done ) != self . count ( ) :
p = self . view_indexes ( done )
if len ( p ) > 0 :
ret . append ( p )
return ret |
def inspect_distribution ( self , image , auth_config = None ) :
"""Get image digest and platform information by contacting the registry .
Args :
image ( str ) : The image name to inspect
auth _ config ( dict ) : Override the credentials that are found in the
config for this request . ` ` auth _ config ` ` ... | registry , _ = auth . resolve_repository_name ( image )
headers = { }
if auth_config is None :
header = auth . get_config_header ( self , registry )
if header :
headers [ 'X-Registry-Auth' ] = header
else :
log . debug ( 'Sending supplied auth config' )
headers [ 'X-Registry-Auth' ] = auth . enc... |
def deconv_stride2_multistep ( x , nbr_steps , output_filters , name = None , reuse = None ) :
"""Use a deconvolution to upsample x by 2 * * ` nbr _ steps ` .
Args :
x : a ` Tensor ` with shape ` [ batch , spatial , depth ] ` or
` [ batch , spatial _ 1 , spatial _ 2 , depth ] `
nbr _ steps : an int specifyi... | with tf . variable_scope ( name , default_name = "deconv_stride2_multistep" , values = [ x ] , reuse = reuse ) :
def deconv1d ( cur , i ) :
cur_shape = shape_list ( cur )
thicker = conv ( cur , output_filters * 2 , ( 1 , 1 ) , padding = "SAME" , activation = tf . nn . relu , name = "deconv1d" + str ... |
def _get_seal_key_ntlm1 ( negotiate_flags , exported_session_key ) :
"""3.4.5.3 SEALKEY
Calculates the seal _ key used to seal ( encrypt ) messages . This for authentication where
NTLMSSP _ NEGOTIATE _ EXTENDED _ SESSIONSECURITY has not been negotiated . Will weaken the keys
if NTLMSSP _ NEGOTIATE _ 56 is not... | if negotiate_flags & NegotiateFlags . NTLMSSP_NEGOTIATE_56 :
seal_key = exported_session_key [ : 7 ] + binascii . unhexlify ( 'a0' )
else :
seal_key = exported_session_key [ : 5 ] + binascii . unhexlify ( 'e538b0' )
return seal_key |
def _discover_ks_version ( self , url ) :
'''Keystone API version discovery''' | result = salt . utils . http . query ( url , backend = 'requests' , status = True , decode = True , decode_type = 'json' )
versions = json . loads ( result [ 'body' ] )
try :
links = [ ver [ 'links' ] for ver in versions [ 'versions' ] [ 'values' ] if ver [ 'status' ] == 'stable' ] [ 0 ] if result [ 'status' ] == 3... |
def _config_win32_nameservers ( self , nameservers ) :
"""Configure a NameServer registry entry .""" | # we call str ( ) on nameservers to convert it from unicode to ascii
nameservers = str ( nameservers )
split_char = self . _determine_split_char ( nameservers )
ns_list = nameservers . split ( split_char )
for ns in ns_list :
if not ns in self . nameservers :
self . nameservers . append ( ns ) |
def _find_statement_by_line ( node , line ) :
"""Extracts the statement on a specific line from an AST .
If the line number of node matches line , it will be returned ;
otherwise its children are iterated and the function is called
recursively .
: param node : An astroid node .
: type node : astroid . bas... | if isinstance ( node , ( nodes . ClassDef , nodes . FunctionDef ) ) : # This is an inaccuracy in the AST : the nodes that can be
# decorated do not carry explicit information on which line
# the actual definition ( class / def ) , but . fromline seems to
# be close enough .
node_line = node . fromlineno
else :
... |
def get_temperature ( self ) :
"""Get current temperature in celsius .""" | try :
request = requests . get ( '{}/temp' . format ( self . resource ) , timeout = self . timeout , allow_redirects = False )
self . temperature = request . json ( ) [ 'compensated' ]
return self . temperature
except requests . exceptions . ConnectionError :
raise exceptions . MyStromConnectionError ( ... |
def run_command_async ( self , msg ) :
''': type message _ generator : generator of dict
: param message _ generator : Generates messages from slack that should be run
: type fire _ all : bool
: param fire _ all : Whether to also fire messages to the event bus
: type tag : str
: param tag : The tag to sen... | log . debug ( 'Going to run a command asynchronous' )
runner_functions = sorted ( salt . runner . Runner ( __opts__ ) . functions )
# Parse args and kwargs
cmd = msg [ 'cmdline' ] [ 0 ]
args , kwargs = self . parse_args_and_kwargs ( msg [ 'cmdline' ] )
# Check for pillar string representation of dict and convert it to ... |
def compute_node_positions ( self ) :
"""Computes nodes positions .
Arranges nodes in a line starting at ( x , y ) = ( 0,0 ) . Node radius is
assumed to be equal to 0.5 units . Nodes are placed at integer
locations .""" | xs = [ 0 ] * len ( self . nodes )
ys = [ 0 ] * len ( self . nodes )
for i , _ in enumerate ( self . nodes [ 1 : ] , start = 1 ) :
prev_r = self . node_sizes [ i - 1 ] / 2
curr_r = self . node_sizes [ i ] / 2
xs [ i ] = xs [ i - 1 ] + prev_r + curr_r
self . node_coords = { "x" : xs , "y" : ys } |
def namedb_get_all_importing_namespace_hashes ( self , current_block ) :
"""Get the list of all non - expired preordered and revealed namespace hashes .""" | query = "SELECT preorder_hash FROM namespaces WHERE (op = ? AND reveal_block < ?) OR (op = ? AND block_number < ?);"
args = ( NAMESPACE_REVEAL , current_block + NAMESPACE_REVEAL_EXPIRE , NAMESPACE_PREORDER , current_block + NAMESPACE_PREORDER_EXPIRE )
namespace_rows = namedb_query_execute ( cur , query , args )
ret = [... |
def from_model_files ( cls , limits , input_model , investigation_time = 1.0 , simple_mesh_spacing = 1.0 , complex_mesh_spacing = 5.0 , mfd_width = 0.1 , area_discretisation = 10.0 ) :
"""Reads the hazard model from a file
: param list limits :
Grid configuration [ west , east , xspc , south , north , yspc ,
... | converter = SourceConverter ( investigation_time , simple_mesh_spacing , complex_mesh_spacing , mfd_width , area_discretisation )
sources = [ ]
for grp in nrml . to_python ( input_model , converter ) :
sources . extend ( grp . sources )
return cls ( limits , sources , area_discretisation ) |
def safe_py_code(code):
    '''Heuristically screen a string for potentially unsafe Python routines.

    Used to improve the safety of modules such as virtualenv.  Returns
    False when the string contains any blacklisted substring, True
    otherwise.  Note this is a plain substring check, not a parse.
    '''
    blacklist = ('import', ';', 'subprocess', 'eval',
                 'open', 'file', 'exec', 'input')
    return not any(token in code for token in blacklist)
def __path(self, path):
    """Return *path* guaranteed to carry the configured prefix.

    :param path: Z-Path
    :return: the path unchanged when it already starts with the prefix,
        otherwise the prefix concatenated in front of it
    """
    prefix = self.__prefix
    return path if path.startswith(prefix) else "{}{}".format(prefix, path)
def deriv2(self, p):
    """Second derivative of the power transform.

    Parameters
    ----------
    p : array-like
        Mean parameters

    Returns
    -------
    g''(p) : array
        Second derivative of the power transform of `p`

    Notes
    -----
    g''(`p`) = `power` * (`power` - 1) * `p`**(`power` - 2)
    """
    power = self.power
    return power * (power - 1) * np.power(p, power - 2)
# Validates the VPC lookup arguments: vpc_id and vpc_name are mutually
# exclusive, and at least one of vpc_id/vpc_name/tags/cidr is required.
# NOTE(review): this row is truncated in the extract
# ("local_get_conn = __utils__..."), so the code is left byte-identical
# and only comments are added.
def _find_vpcs ( vpc_id = None , vpc_name = None , cidr = None , tags = None , region = None , key = None , keyid = None , profile = None ) :
'''Given VPC properties , find and return matching VPC ids .
Borrowed from boto _ vpc ; these could be refactored into a common library''' | if all ( ( vpc_id , vpc_name ) ) :
raise SaltInvocationError ( 'Only one of vpc_name or vpc_id may be ' 'provided.' )
if not any ( ( vpc_id , vpc_name , tags , cidr ) ) :
raise SaltInvocationError ( 'At least one of the following must be ' 'provided: vpc_id, vpc_name, cidr or tags.' )
local_get_conn = __utils__...
def merge_option_dicts(old_opts, new_opts):
    """Update the old_opts option dictionary with the options defined in
    new_opts.  Instead of a shallow update as would be performed by calling
    old_opts.update(new_opts), this updates the dictionaries of all option
    types separately.

    Returns a new merged dictionary; neither input dict (nor any of the
    per-option-type dictionaries they contain) is mutated.

    :param old_opts: dict mapping option type -> dict of options
    :param new_opts: dict mapping option type -> dict of options to overlay
    :return: new dict with new_opts merged over old_opts per option type
    """
    # Copy each per-type dict as well: `dict(old_opts)` alone is a shallow
    # copy, so the subsequent .update() would mutate old_opts' inner dicts.
    merged = {option_type: dict(options)
              for option_type, options in old_opts.items()}
    for option_type, options in new_opts.items():
        merged.setdefault(option_type, {}).update(options)
    return merged
def _spawn_background_rendering(self, rate=5.0):
    """Start a thread that refreshes the render window ``rate`` times/sec.

    Sometimes directly modifying object data doesn't trigger Modified()
    and upstream objects won't be updated.  This keeps the render window
    updated without consuming too many resources.
    """
    self.render_trigger.connect(self.ren_win.Render)
    interval = rate ** -1

    def _refresh_loop():
        # Keep refreshing until the owner clears ``self.active``.
        while self.active:
            time.sleep(interval)
            self._render()

    self.render_thread = Thread(target=_refresh_loop)
    self.render_thread.start()
# Merges two per-file gcov coverage reports, summing per-line hit counts;
# a None entry in the original coverage is replaced by the new entry.
# NOTE(review): this row is truncated in the extract (the tail of the loop
# is cut off), so the code is left byte-identical and only comments added.
def combine_reports ( original , new ) :
"""Combines two gcov reports for a file into one by adding the number of hits on each line""" | if original is None :
return new
report = { }
report [ 'name' ] = original [ 'name' ]
report [ 'source_digest' ] = original [ 'source_digest' ]
coverage = [ ]
for original_num , new_num in zip ( original [ 'coverage' ] , new [ 'coverage' ] ) :
if original_num is None :
coverage . append ( new_num )
... |
def _hlink(self):
    """Reference to the hyperlink element for this click action.

    Returns the hover-hyperlink element when this action represents a
    hover, otherwise the click-hyperlink element; |None| if the element
    is not present.
    """
    element = self._element
    return element.hlinkHover if self._hover else element.hlinkClick
# Encodes `value` (bytes) into its string form according to `store_type`
# (hex / base64 / base85) and records both on the instance.
# NOTE(review): this row is truncated in the extract ("e..." is cut off --
# presumably an elif/else tail), so code is left byte-identical.
def set_encode_key_value ( self , value , store_type ) :
"""Save the key value based on its storage type .""" | self . _store_type = store_type
if store_type == PUBLIC_KEY_STORE_TYPE_HEX :
self . _value = value . hex ( )
elif store_type == PUBLIC_KEY_STORE_TYPE_BASE64 :
self . _value = b64encode ( value ) . decode ( )
elif store_type == PUBLIC_KEY_STORE_TYPE_BASE85 :
self . _value = b85encode ( value ) . decode ( )
e...
def wait(*args, **kwargs):
    """Wrap the 'wait()' function of the 'waiting' library, filling in
    default parameter values.

    WebDriverException is included in the expected (ignored) exceptions
    by default; the default timeout comes from webium settings.
    """
    defaults = {
        'sleep_seconds': (1, None),
        'expected_exceptions': WebDriverException,
        'timeout_seconds': webium.settings.wait_timeout,
    }
    for name, value in defaults.items():
        kwargs.setdefault(name, value)
    return wait_lib(*args, **kwargs)
def new_label_descriptors(defaults, keys):
    """Create the label descriptors for the metric_descriptor that will be
    sent to Stackdriver Monitoring.

    :param defaults: mapping whose keys are default label keys
    :param keys: additional label keys
    :return: list of ``{"key": ..., "description": ...}`` dicts, defaults
        first, in iteration order
    """
    return [
        {"key": sanitize_label(label_key.key),
         "description": label_key.description}
        for label_key in itertools.chain(defaults.keys(), keys)
    ]
def set_axis_color(axis, color, alpha=None):
    """Set the spine color on every side of *axis* (top, right, bottom, left).

    :param axis: axes object whose spines are recolored
    :param color: color applied to each spine
    :param alpha: optional opacity; applied only when not None
    """
    spines = axis.spines
    for side in ('top', 'right', 'bottom', 'left'):
        current = spines[side]
        current.set_color(color)
        if alpha is not None:
            current.set_alpha(alpha)
def get_slices(self, idx, shape) -> Tuple[IntOrSlice, ...]:
    """Return a tuple of one |int| and some |slice| objects to access all
    values of a certain device within |NetCDFVariableDeep.array|.

    The leading entries come from ``self.get_timeplaceslice(idx)``; one
    ``slice(0, length)`` is appended per entry of *shape*.
    """
    trailing = tuple(slice(0, length) for length in shape)
    return tuple(self.get_timeplaceslice(idx)) + trailing
# Replaces the values of one relationship on a resource: fetches the model
# and the resource (requiring EDIT permission), validates that `rel_key`
# names a known relationship, then resolves its python-side key.
# NOTE(review): this row is truncated in the extract ("rela..." is cut
# off), so the code is left byte-identical and only comments are added.
def patch_relationship ( self , session , json_data , api_type , obj_id , rel_key ) :
"""Replacement of relationship values .
: param session : SQLAlchemy session
: param json _ data : Request JSON Data
: param api _ type : Type of the resource
: param obj _ id : ID of the resource
: param rel _ key : Key... | model = self . _fetch_model ( api_type )
resource = self . _fetch_resource ( session , api_type , obj_id , Permissions . EDIT )
if rel_key not in resource . __jsonapi_map_to_py__ . keys ( ) :
raise RelationshipNotFoundError ( resource , resource , rel_key )
py_key = resource . __jsonapi_map_to_py__ [ rel_key ]
rela...
# Installs `appletObj` as the (single, application-wide) mini applet; the
# visible portion only shows the guard that refuses to replace an already
# installed mini applet (warns and returns False).
# NOTE(review): this row is truncated in the extract (the arrangement code
# after the guard is cut off), so code is left byte-identical.
def qteAddMiniApplet ( self , appletObj : QtmacsApplet ) :
"""Install ` ` appletObj ` ` as the mini applet in the window layout .
At any given point there can ever only be one mini applet in
the entire Qtmacs application , irrespective of how many
windows are open .
Note that this method does nothing if a c... | # Do nothing if a custom mini applet has already been
# installed .
if self . _qteMiniApplet is not None :
msg = 'Cannot replace mini applet more than once.'
self . qteLogger . warning ( msg )
return False
# Arrange all registered widgets inside this applet
# automatically if the mini applet object did not ...
# Collects currently running process names by shelling out to POSIX
# `ps -A -o comm=` and decoding its stdout on Python 3.
# NOTE(review): this row is truncated in the extract ("output . s..." is
# cut off), so the code is left byte-identical and only comments added.
def get_processes ( self ) :
"""Grab a shuffled list of all currently running process names""" | procs = set ( )
try : # POSIX ps , so it should work in most environments where doge would
p = sp . Popen ( [ 'ps' , '-A' , '-o' , 'comm=' ] , stdout = sp . PIPE )
output , error = p . communicate ( )
if sys . version_info > ( 3 , 0 ) :
output = output . decode ( 'utf-8' )
for comm in output . s...
def derived_from_all(self, identities: List[QualName]) -> MutableSet[QualName]:
    """Return the set of identities transitively derived from every
    identity in *identities*.

    :param identities: qualified names whose derived sets are intersected
    :return: intersection of ``self.derived_from(i)`` over all entries;
        the empty set when *identities* is empty
    """
    if not identities:
        return set()
    # Intersect the derived set of each identity in turn.  The loop
    # variable is renamed from the original ``id``, which shadowed the
    # builtin of the same name.
    result = self.derived_from(identities[0])
    for identity in identities[1:]:
        result &= self.derived_from(identity)
    return result
# Dispatches on the file extension to the matching reader (fitsio / numpy).
# NOTE(review): `ext in ('.npy')` and `ext in ('.csv')` are SUBSTRING tests
# against a plain string, not tuple membership -- e.g. `'.n' in '.npy'` is
# True.  Likely intended: `ext in ('.npy',)`.  Left as-is because this row
# is truncated in the extract ("elif ext in ( '...." is cut off).
def read ( filename , ** kwargs ) :
"""Read a generic input file into a recarray .
Accepted file formats : [ . fits , . fz , . npy , . csv , . txt , . dat ]
Parameters :
filename : input file name
kwargs : keyword arguments for the reader
Returns :
recarray : data array""" | base , ext = os . path . splitext ( filename )
if ext in ( '.fits' , '.fz' ) : # Abstract fits here . . .
return fitsio . read ( filename , ** kwargs )
elif ext in ( '.npy' ) :
return np . load ( filename , ** kwargs )
elif ext in ( '.csv' ) :
return np . recfromcsv ( filename , ** kwargs )
elif ext in ( '....
# Builds an issue payload, strips None-valued fields, POSTs it to the
# repository's issues endpoint (expecting HTTP 201) and wraps the result.
# NOTE(review): the local name `json` shadows the module of the same name
# in this scope.  This row is truncated in the extract
# ("return Issue ( js..."), so the code is left byte-identical.
def create_issue ( self , title , body = None , assignee = None , milestone = None , labels = None ) :
"""Creates an issue on this repository .
: param str title : ( required ) , title of the issue
: param str body : ( optional ) , body of the issue
: param str assignee : ( optional ) , login of the user to a... | issue = { 'title' : title , 'body' : body , 'assignee' : assignee , 'milestone' : milestone , 'labels' : labels }
self . _remove_none ( issue )
json = None
if issue :
url = self . _build_url ( 'issues' , base_url = self . _api )
json = self . _json ( self . _post ( url , data = issue ) , 201 )
return Issue ( js...
def wrap_content_as_binary_if_needed(func_, *args, **kwargs):
    """Coerce the content field of the first positional argument (a dict)
    to a RethinkDB BINARY value, then delegate to *func_*.

    :param func_: callable invoked after the coercion
    :param args: positional arguments; ``args[0]`` must be a dict
    :param kwargs: keyword arguments passed through unchanged
    :return: whatever ``func_(*args, **kwargs)`` returns
    """
    document = args[0]
    assert isinstance(document, dict)
    try:
        document[CONTENT_FIELD] = BINARY(document.get(CONTENT_FIELD, b""))
    except (r.errors.ReqlDriverCompileError, AttributeError):  # pragma: no cover
        # Best effort: leave the document unchanged if the driver refuses
        # to wrap the value.
        pass
    return func_(*args, **kwargs)
def _publish_deferred_messages(self):
    """Drain and publish any requests buffered while the connection was
    unavailable.  Called once pika is connected and has an open channel.
    """
    global message_stack
    # Nothing to do while the connection is closed or the buffer is empty.
    if self._rabbitmq_is_closed or not message_stack:
        return
    LOGGER.info('Publishing %i deferred message(s)', len(message_stack))
    while message_stack:
        self._publish_message(*message_stack.pop())
def from_file(cls, filename, keep_neg=False, **kwargs):
    """Create a spectrum from file.

    If *filename* has a 'fits' or 'fit' suffix it is read as FITS,
    otherwise it is read as ASCII.

    Parameters
    ----------
    filename : str
        Spectrum filename.
    keep_neg : bool
        See `~synphot.models.Empirical1D`.
    kwargs : dict
        Extra keyword arguments forwarded to the reader.
    """
    header, wavelengths, fluxes = specio.read_spec(filename, **kwargs)
    metadata = {'header': header}
    return cls(Empirical1D,
               points=wavelengths,
               lookup_table=fluxes,
               keep_neg=keep_neg,
               meta=metadata)
def pending_transactions(server):
    """Return the number of pending transactions (0 confirmations) on
    *server*.

    :param server: namecoind host queried over RPC
    :return: count of transactions with zero confirmations among the most
        recent 10000
    """
    client = NamecoindClient(server, NAMECOIND_PORT,
                             NAMECOIND_USER, NAMECOIND_PASSWD)
    transactions = client.listtransactions("", 10000)
    return sum(1 for tx in transactions if tx['confirmations'] == 0)
# PLY yacc parser action.  NOTE: the docstring below is the grammar
# production -- ply.yacc reads it at parser-construction time, so its text
# is behavior, not documentation, and must not be reworded.
# Effect: wraps the matched token value in a ConstPrimitiveValue AST node,
# tagged with the token's source line number.
def p_const_value_primitive ( self , p ) :
'''const _ value _ primitive : INTCONSTANT
| DUBCONSTANT
| LITERAL
| const _ bool''' | p [ 0 ] = ast . ConstPrimitiveValue ( p [ 1 ] , lineno = p . lineno ( 1 ) )
def setObjectName(self, objectName):
    """Store the new object name and refresh this line edit's style sheet,
    which depends on the name.

    :param objectName: <str>
    """
    # Let the Qt base class record the name first, then rebuild the
    # name-dependent style sheet.
    super(XLineEdit, self).setObjectName(objectName)
    self.adjustStyleSheet()
# Clips the segment (line_pt_1, line_pt_2) to the canvas extent; the
# visible portion computes the segment's bounding box and begins the
# vertical-line special case (equal x-coordinates).
# NOTE(review): this row is truncated in the extract (the return expression
# is cut off), so the code is left byte-identical and only comments added.
def _clip_line ( self , line_pt_1 , line_pt_2 ) :
"""clip line to canvas""" | x_min = min ( line_pt_1 [ 0 ] , line_pt_2 [ 0 ] )
x_max = max ( line_pt_1 [ 0 ] , line_pt_2 [ 0 ] )
y_min = min ( line_pt_1 [ 1 ] , line_pt_2 [ 1 ] )
y_max = max ( line_pt_1 [ 1 ] , line_pt_2 [ 1 ] )
extent = self . extent ( )
if line_pt_1 [ 0 ] == line_pt_2 [ 0 ] :
return ( ( line_pt_1 [ 0 ] , max ( y_min , extent...
# Retrieves rows whose current-sort-column value is >= `value`; a None
# value means "from the beginning" (unbounded inequality query).
# NOTE(review): this row is truncated in the extract
# ("return self . constructRows..."), so code is left byte-identical.
def rowsAfterValue ( self , value , count ) :
"""Retrieve some rows at or after a given sort - column value .
@ param value : Starting value in the index for the current sort column
at which to start returning results . Rows with a column value for the
current sort column which is greater than or equal to thi... | if value is None :
query = self . inequalityQuery ( None , count , True )
else :
pyvalue = self . _toComparableValue ( value )
currentSortAttribute = self . currentSortColumn . sortAttribute ( )
query = self . inequalityQuery ( currentSortAttribute >= pyvalue , count , True )
return self . constructRows...
def init_disk_cache(self):
    """Initialize the on-disk version of the cache.

    Removes the previous cache file best-effort (on the first call
    ``self.disk_cache_location`` does not exist yet and the failed
    attribute lookup is swallowed), then points ``disk_cache_location``
    at a fresh 'cache' path inside a new temporary directory.
    """
    try:
        # Clean up the old disk cache file, if there is one.
        os.remove(self.disk_cache_location)
    except Exception:
        # Missing attribute or missing file: nothing to clean up.
        pass
    self.disk_cache_location = os.path.join(tempfile.mkdtemp(), 'cache')
# Unpacks an OS image tarball into `outfolder` and starts preparing it as a
# VR base image; the visible portion handles the /etc/resolv.conf symlink
# that would break bind-mounting.
# NOTE(review): this row is truncated in the extract ("'resolv.c..." is cut
# off), so the code is left byte-identical and only comments are added.
def prepare_image ( tarpath , outfolder , ** kwargs ) :
"""Unpack the OS image stored at tarpath to outfolder .
Prepare the unpacked image for use as a VR base image .""" | outfolder = path . Path ( outfolder )
untar ( tarpath , outfolder , ** kwargs )
# Some OSes have started making / etc / resolv . conf into a symlink to
# / run / resolv . conf . That prevents us from bind - mounting to that
# location . So delete that symlink , if it exists .
resolv_path = outfolder / 'etc' / 'resolv.c...
def serial_control_send(self, device, flags, timeout, baudrate, count, data,
                        force_mavlink1=False):
    '''Control a serial port.

    This can be used for raw access to an onboard serial peripheral such
    as a GPS or telemetry radio (e.g. to update device firmware over
    MAVLink).  Encodes a SERIAL_CONTROL message from the given arguments
    and sends it, returning whatever ``self.send`` returns.
    '''
    encoded = self.serial_control_encode(device, flags, timeout,
                                         baudrate, count, data)
    return self.send(encoded, force_mavlink1=force_mavlink1)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.