| signature | implementation |
|---|---|
def consistent_with(self, state):
    """Indicate that the query should be consistent with one or more mutations.

    :param state: The state of the mutations it should be consistent with.
    :type state: :class:`~.couchbase.mutation_state.MutationState`
    """
    compatible_modes = (UNBOUNDED, NOT_BOUNDED, 'at_plus')
    if self.consistency not in compatible_modes:
        raise TypeError('consistent_with not valid with other consistency options')
    if not state:
        raise TypeError('Passed empty or invalid state', state)
    self.consistency = 'at_plus'
    self._body['scan_vectors'] = state._sv
def expire(self, key, timeout):
    """Set a timeout on key.

    If timeout is a float it is multiplied by 1000, coerced to int and
    passed to the `pexpire` method. Otherwise raises TypeError if the
    timeout argument is not an int.
    """
    if isinstance(timeout, float):
        # Sub-second precision: delegate to the millisecond variant.
        return self.pexpire(key, int(timeout * 1000))
    if isinstance(timeout, int):
        fut = self.execute(b'EXPIRE', key, timeout)
        return wait_convert(fut, bool)
    raise TypeError("timeout argument must be int, not {!r}".format(timeout))
def convert_text_to_rouge_format ( text , title = "dummy title" ) :
"""Convert a text to a format ROUGE understands . The text is
assumed to contain one sentence per line .
text : The text to convert , containg one sentence per line .
title : Optional title for the text . The title will appear
in the conver... | sentences = text . split ( "\n" )
sent_elems = [ "<a name=\"{i}\">[{i}]</a> <a href=\"#{i}\" id={i}>" "{text}</a>" . format ( i = i , text = sent ) for i , sent in enumerate ( sentences , start = 1 ) ]
html = """<html>
<head>
<title>{title}</title>
</head>
<body bgcolor="white">
{elems}
</body>
</html>""" . format ( ti... |
def __validate1(property):
    """Exit with error if property is not valid."""
    # NOTE(review): 'property' shadows the builtin; kept for interface
    # compatibility with existing callers.
    assert isinstance(property, Property)
    msg = None  # NOTE(review): assigned but never used in this block
    if not property.feature.free:
        # Non-free features restrict the set of allowed values; delegate
        # validation to the module-level 'feature' helper (presumably it
        # reports/exits on an invalid value -- TODO confirm).
        feature.validate_value_string(property.feature, property.value)
def main ( ) :
"""The main entry point of the program""" | # Parse command line arguments
argp = _cli_argument_parser ( )
args = argp . parse_args ( )
# setup logging
logging . basicConfig ( level = args . loglevel , format = "%(levelname)s %(message)s" )
console . display ( "Collecting documentation from files" )
collector_metrics = metrics . Metrics ( )
docs = collector . pa... |
def get_scan_results_xml(self, scan_id, pop_res):
    """Get scan_id scan's results in XML format.

    @return: <results> Element with one child element per scan result.
    """
    results_element = Element('results')
    for scan_result in self.scan_collection.results_iterator(scan_id, pop_res):
        results_element.append(get_result_xml(scan_result))
    logger.info('Returning %d results', len(results_element))
    return results_element
def format_py3o_val(value):
    """Format a value so it fits py3o's context.

    * Handles linebreaks (converted to ODF <text:line-break/> tags).
    """
    escaped = escape(force_unicode(value))
    with_breaks = escaped.replace(u'\n', u'<text:line-break/>')
    return Markup(with_breaks)
def locateChild ( self , context , segments ) :
"""Delegate dispatch to a sharing resource if the request is for a user
subdomain , otherwise fall back to the wrapped resource ' s C { locateChild }
implementation .""" | request = IRequest ( context )
hostname = request . getHeader ( 'host' )
info = self . subdomain ( hostname )
if info is not None :
username , domain = info
index = UserIndexPage ( IRealm ( self . siteStore ) , self . webViewer )
resource = index . locateChild ( None , [ username ] ) [ 0 ]
return resour... |
def sort_with_heap(sequence):
    """Heap-sort *sequence*: return its values as a new ascending list.

    The values are placed into a binary heap and then successively
    removed smallest-first.

    Example:
        sort_with_heap([1, 3, 5, 7, 9, 2, 4, 6, 8, 0])
        Returns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]

    :param sequence: iterable of mutually comparable items
    :return: new sorted list (the input is not modified)
    """
    import heapq as hq
    heap = list(sequence)
    # heapify builds the heap in O(n), instead of n pushes at O(n log n).
    hq.heapify(heap)
    return [hq.heappop(heap) for _ in range(len(heap))]
def env_int(name, required=False, default=empty):
    """Pull an environment variable out of the environment and cast it to int.

    If the name is not present in the environment and no default is
    specified then a ``ValueError`` will be raised. Similarly, the
    ``int()`` cast will raise if the value is not a valid integer.
    """
    raw = get_env_value(name, required=required, default=default)
    if raw is empty:
        raise ValueError("`env_int` requires either a default value to be specified, or for " "the variable to be present in the environment")
    return int(raw)
def _kernel_shape(self, input_shape):
    """Helper to calculate the kernel shape."""
    kernel_sizes = iter(self._kernel_size)
    shape = []
    # The rhs (kernel) spec drives the output: 'O' -> filter count,
    # 'I' -> input channel count (taken from the lhs 'C' axis), any
    # other character consumes the next spatial kernel size.
    for spec_char in self._rhs_spec:
        if spec_char == 'O':
            shape.append(self._filters)
        elif spec_char == 'I':
            shape.append(input_shape[self._lhs_spec.index('C')])
        else:
            shape.append(next(kernel_sizes))
    return shape
def list_build_records_for_set(id=None, name=None, page_size=200, page_index=0, sort="", q=""):
    """List all build records for a BuildConfigurationSet."""
    content = list_build_records_for_set_raw(id, name, page_size, page_index, sort, q)
    if not content:
        return None
    return utils.format_json_list(content)
def create_pipeline ( self , onetime = None ) :
"""Create the spinnaker pipeline ( s ) .""" | utils . banner ( "Creating Pipeline" )
kwargs = { 'app' : self . app , 'trigger_job' : self . trigger_job , 'prop_path' : self . json_path , 'base' : None , 'runway_dir' : self . runway_dir , }
pipeline_type = self . configs [ 'pipeline' ] [ 'type' ]
if pipeline_type not in consts . ALLOWED_TYPES :
raise NotImpleme... |
def OnLineWidth(self, event):
    """Line width choice event handler."""
    combobox = event.GetEventObject()
    selection = event.GetInt()
    chosen_width = int(combobox.GetString(selection))
    current_borders = self.bordermap[self.borderstate]
    post_command_event(self, self.BorderWidthMsg, width=chosen_width, borders=current_borders)
def simple_profile ( self , annual_demand , ** kwargs ) :
"""Create industrial load profile
Parameters
annual _ demand : float
Total demand .
Other Parameters
am : datetime . time
beginning of workday
pm : datetime . time
end of workday
week : list
list of weekdays
weekend : list
list of wee... | # Day ( am to pm ) , night ( pm to am ) , week day ( week ) ,
# weekend day ( weekend )
am = kwargs . get ( 'am' , settime ( 7 , 00 , 0 ) )
pm = kwargs . get ( 'pm' , settime ( 23 , 30 , 0 ) )
week = kwargs . get ( 'week' , [ 1 , 2 , 3 , 4 , 5 ] )
weekend = kwargs . get ( 'weekend' , [ 0 , 6 , 7 ] )
default_factors = {... |
def assert_has_attr(obj, attribute, msg_fmt="{msg}"):
    """Fail if an object does not have an attribute.

    >>> assert_has_attr([], "index")
    >>> assert_has_attr([], "i_do_not_have_this")
    Traceback (most recent call last):
        ...
    AssertionError: [] does not have attribute 'i_do_not_have_this'
    """
    if hasattr(obj, attribute):
        return
    msg = "{!r} does not have attribute '{}'".format(obj, attribute)
    fail(msg_fmt.format(msg=msg, obj=obj, attribute=attribute))
def save_configuration_to_hdf5 ( register , configuration_file , name = '' ) :
'''Saving configuration to HDF5 file from register object
Parameters
register : pybar . fei4 . register object
configuration _ file : string , file
Filename of the HDF5 configuration file or file object .
name : string
Additi... | def save_conf ( ) :
logging . info ( "Saving configuration: %s" % h5_file . filename )
register . configuration_file = h5_file . filename
try :
configuration_group = h5_file . create_group ( h5_file . root , "configuration" )
except tb . NodeError :
configuration_group = h5_file . root .... |
def yesno ( message , default = 'yes' , suffix = ' ' ) :
"""Prompt user to answer yes or no . Return True if the default is chosen ,
otherwise False .""" | if default == 'yes' :
yesno_prompt = '[Y/n]'
elif default == 'no' :
yesno_prompt = '[y/N]'
else :
raise ValueError ( "default must be 'yes' or 'no'." )
if message != '' :
prompt_text = "{0} {1}{2}" . format ( message , yesno_prompt , suffix )
else :
prompt_text = "{0}{1}" . format ( yesno_prompt , s... |
def requirements ( self , requires ) :
'''Sets the requirements for the package .
It will take either a valid path to a requirements file or
a list of requirements .''' | if requires :
if isinstance ( requires , basestring ) and os . path . isfile ( os . path . abspath ( requires ) ) :
self . _requirements_file = os . path . abspath ( requires )
else :
if isinstance ( self . _requirements , basestring ) :
requires = requires . split ( )
self .... |
async def send_notification(self, method, args=()):
    '''Send an RPC notification over the network.

    :param method: name of the RPC method to notify
    :param args: positional arguments for the notification (default: empty)
    '''
    # Notifications expect no response; serialize and transmit only.
    message = self.connection.send_notification(Notification(method, args))
    await self._send_message(message)
def _create_activity2 ( self , parent , name , activity_type = ActivityType . TASK ) :
"""Create a new activity .
. . important : :
This function creates activities for KE - chain versions later than 2.9.0-135
In effect where the module ' wim ' has version ' > = 2.0.0 ' .
The version of ' wim ' in KE - chai... | # WIM1 : activity _ class , WIM2 : activity _ type
if self . match_app_version ( label = 'wim' , version = '<2.0.0' , default = True ) :
raise APIError ( 'This method is only compatible with versions of KE-chain where the internal `wim` module ' 'has a version >=2.0.0. Use the `Client.create_activity()` method.' )
... |
def post(self, request, bot_id, format=None):
    """Add a new hook.

    serializer: HookSerializer
    responseMessages:
        - code: 401
          message: Not authenticated
        - code: 400
          message: Not valid request
    """
    # Pure delegation: the generic list-view POST does the actual work;
    # this override exists to attach the docs above to the endpoint.
    return super(HookList, self).post(request, bot_id, format)
async def wait_tasks(tasks, flatten=True):
    '''Gather a list of asynchronous tasks and wait for their completion.

    :param list tasks:
        A list of *asyncio* tasks wrapped in :func:`asyncio.ensure_future`.
    :param bool flatten:
        If ``True`` the returned results are flattened into one list when
        every individual result is iterable.
    :return: list of task results (possibly flattened)
    '''
    results = await asyncio.gather(*tasks)
    every_result_iterable = all(hasattr(r, '__iter__') for r in results)
    if flatten and every_result_iterable:
        results = list(itertools.chain.from_iterable(results))
    return results
def get ( self , model_class , strict = True , returnDict = False , fetchOne = False , ** where ) :
'''params :
model _ class : The queried model class
strict : bool - > If True , queries are run with EQUAL ( = ) operator .
If False : Queries are run with RLIKE keyword
returnDict : bool - > Return a list if... | self . typeassert ( model_class , strict , returnDict , where )
table = model_class . __name__ . lower ( )
with Session ( self . settings ) as conn :
if not where :
query = f'SELECT * FROM {table}'
else :
query = f'SELECT * FROM {table} WHERE'
index = 1
operator = '=' if strict else 'RLI... |
def key_binding ( self , keydef , mode = 'force' ) :
"""Function decorator to register a low - level key binding .
The callback function signature is ` ` fun ( key _ state , key _ name ) ` ` where ` ` key _ state ` ` is either ` ` ' U ' ` ` for " key
up " or ` ` ' D ' ` ` for " key down " .
The keydef format ... | def register ( fun ) :
fun . mpv_key_bindings = getattr ( fun , 'mpv_key_bindings' , [ ] ) + [ keydef ]
def unregister_all ( ) :
for keydef in fun . mpv_key_bindings :
self . unregister_key_binding ( keydef )
fun . unregister_mpv_key_bindings = unregister_all
self . register_key_bind... |
def FileEntryExistsByPathSpec(self, path_spec):
    """Determines if a file entry for a path specification exists.

    Args:
      path_spec (PathSpec): a path specification.

    Returns:
      bool: True if the file entry exists.
    """
    location = getattr(path_spec, 'location', None)
    # A path spec without a location, or one outside this archive's
    # root, cannot name an entry here.
    if location is None or not location.startswith(self.LOCATION_ROOT):
        return False
    # The root location itself always exists.
    if len(location) == 1:
        return True
    # Strip the leading root character before querying the archive.
    return self._cpio_archive_file.FileEntryExistsByPath(location[1:])
def index(args):
    """%prog index database.fasta

    Wrapper for `bwa index`. Same interface.
    """
    parser = OptionParser(index.__doc__)
    opts, remaining = parser.parse_args(args)
    if len(remaining) != 1:
        sys.exit(not parser.print_help())
    (dbfile,) = remaining
    check_index(dbfile)
def _tot_unhandled_services_by_state(self, state):
    """Generic function to get the number of unhandled problem services in the specified state.

    :param state: state to filter on
    :type state:
    :return: number of services in state *state* which are not acknowledged problems
    :rtype: int
    """
    count = 0
    for service in self.services:
        if service.state != state:
            continue
        if service.is_problem and not service.problem_has_been_acknowledged:
            count += 1
    return count
def _add_to_download_queue ( self , lpath , rfile ) : # type : ( Downloader , pathlib . Path ,
# blobxfer . models . azure . StorageEntity ) - > None
"""Add remote file to download queue
: param Downloader self : this
: param pathlib . Path lpath : local path
: param blobxfer . models . azure . StorageEntity ... | # prepare remote file for download
dd = blobxfer . models . download . Descriptor ( lpath , rfile , self . _spec . options , self . _general_options , self . _resume )
with self . _transfer_lock :
self . _transfer_cc [ dd . entity . path ] = 0
if dd . entity . is_encrypted :
self . _dd_map [ str ( dd . ... |
def report_many(self, event_list, metadata=None, block=None):
    """Reports all the given events to Alooma by formatting them properly and
    placing them in the buffer to be sent by the Sender instance.

    :param event_list: A list of dicts / strings representing events
    :param metadata: (Optional) metadata to attach to every event
    :param block: (Optional) whether to block when the buffer is full
    :return: list of ``(index, event)`` pairs that failed to be queued
    """
    failures = []
    for position, item in enumerate(event_list):
        if not self.report(item, metadata, block):
            failures.append((position, item))
    return failures
def strip_tweet(text, remove_url=True):
    """Strip a tweet message.

    This method removes mention strings and urls (optional).

    :param text: tweet message
    :type text: :class:`str`
    :param remove_url: Remove urls. default :const:`True`.
    :type remove_url: :class:`boolean`
    :return: cleaned-up text
    """
    # Either drop urls entirely or expand shortened ones in place.
    if remove_url:
        text = url_pattern.sub('', text)
    else:
        text = expand_url(text)
    without_mentions = mention_pattern.sub('', text)
    unescaped = html_parser.unescape(without_mentions)
    return unescaped.strip()
def remove_foothills ( self , q_data , marked , bin_num , bin_lower , centers , foothills ) :
"""Mark points determined to be foothills as globbed , so that they are not included in
future searches . Also searches neighboring points to foothill points to determine
if they should also be considered foothills .
... | hills = [ ]
for foot in foothills :
center = foot [ 0 ]
hills [ : ] = foot [ 1 ] [ : ]
# remove all foothills
while len ( hills ) > 0 : # mark this point
pt = hills . pop ( - 1 )
marked [ pt ] = self . GLOBBED
for s_index , val in np . ndenumerate ( marked [ pt [ 0 ] - 1 : pt [ 0... |
def sort ( self , key , * get_patterns , by = None , offset = None , count = None , asc = None , alpha = False , store = None ) :
"""Sort the elements in a list , set or sorted set .""" | args = [ ]
if by is not None :
args += [ b'BY' , by ]
if offset is not None and count is not None :
args += [ b'LIMIT' , offset , count ]
if get_patterns :
args += sum ( ( [ b'GET' , pattern ] for pattern in get_patterns ) , [ ] )
if asc is not None :
args += [ asc is True and b'ASC' or b'DESC' ]
if alp... |
def query(self, coords):
    """Args:
        coords (`astropy.coordinates.SkyCoord`): The coordinates to query.

    Returns:
        A float array of the value of the map at the given coordinates. The
        shape of the output is the same as the shape of the coordinates
        stored by `coords`.
    """
    # Convert each coordinate into a HEALPix pixel index using this
    # map's frame/nside/nesting scheme, then index the stored values.
    pix_idx = coord2healpix(coords, self._frame, self._nside, nest=self._nest)
    return self._pix_val[pix_idx]
def init ( ) :
'''Return the list of svn remotes and their configuration information''' | bp_ = os . path . join ( __opts__ [ 'cachedir' ] , 'svnfs' )
new_remote = False
repos = [ ]
per_remote_defaults = { }
for param in PER_REMOTE_OVERRIDES :
per_remote_defaults [ param ] = six . text_type ( __opts__ [ 'svnfs_{0}' . format ( param ) ] )
for remote in __opts__ [ 'svnfs_remotes' ] :
repo_conf = copy ... |
def plot_csm_and_maps(self, isite, max_csm=8.0):
    """Plot the coordination numbers of a given site for all the
    distfactor/angfactor parameters. If the chemical environments are
    given, a color map is added to the plot, with the lowest continuous
    symmetry measure as the value for the color.

    :param isite: index of the site for which the plot is generated.
    :param max_csm: upper bound of the continuous symmetry measure scale.
    """
    # matplotlib is an optional dependency; bail out gracefully if absent.
    try:
        import matplotlib.pyplot as plt
    except ImportError:
        print('Plotting Chemical Environments requires matplotlib ... exiting "plot" function')
        return
    # Figure construction is delegated; None means there is nothing to show.
    fig = self.get_csm_and_maps(isite=isite, max_csm=max_csm)
    if fig is None:
        return
    plt.show()
async def _load_tuple ( self , reader , elem_type , params = None , elem = None ) :
"""Loads tuple of elements from the reader . Supports the tuple ref .
Returns loaded tuple .
: param reader :
: param elem _ type :
: param params :
: param container :
: return :""" | c_len = await load_uvarint ( reader )
if elem and c_len != len ( elem ) :
raise ValueError ( "Size mismatch" )
if c_len != len ( elem_type . f_specs ( ) ) :
raise ValueError ( "Tuple size mismatch" )
elem_fields = params [ 0 ] if params else None
if elem_fields is None :
elem_fields = elem_type . f_specs ( ... |
def main ( ) :
"""Use processes and Netmiko to connect to each of the devices . Execute
' show version ' on each device . Record the amount of time required to do this .""" | start_time = datetime . now ( )
procs = [ ]
for a_device in devices :
my_proc = Process ( target = show_version , args = ( a_device , ) )
my_proc . start ( )
procs . append ( my_proc )
for a_proc in procs :
print ( a_proc )
a_proc . join ( )
print ( "\nElapsed time: " + str ( datetime . now ( ) - st... |
def refresh(self, url=CONST.PANEL_URL):
    """Refresh the alarm device.

    :param url: Abode endpoint to refresh from (defaults to the panel URL).
    :return: the response object returned by the base-class refresh.
    """
    response_object = AbodeDevice.refresh(self, url)
    # pylint: disable=W0212
    # The alarm mirrors panel state, so push the refreshed payload into
    # the parent Abode object's panel cache as well.
    self._abode._panel.update(response_object[0])
    return response_object
def rangefinder_send(self, distance, voltage, force_mavlink1=False):
    '''Rangefinder reporting

    distance : distance in meters (float)
    voltage : raw voltage if available, zero otherwise (float)
    force_mavlink1 : force encoding as a MAVLink 1 message
    '''
    # Encode the RANGEFINDER message and transmit it on this link.
    return self.send(self.rangefinder_encode(distance, voltage), force_mavlink1=force_mavlink1)
def t_ID ( self , t ) :
r'[ a - zA - Z ] +' | if t . value in self . _RESERVED . keys ( ) :
t . type = self . _RESERVED [ t . value ]
return t
if Information . is_valid_symbol ( t . value ) or Information . is_valid_category ( t . value ) :
t . type = self . _INFORMATION_UNIT
return t
if Duration . is_valid_symbol ( t . value ) :
t . type = sel... |
def find_actions ( orbit , N_max , force_harmonic_oscillator = False , toy_potential = None ) :
r"""Find approximate actions and angles for samples of a phase - space orbit .
Uses toy potentials with known , analytic action - angle transformations to
approximate the true coordinates as a Fourier sum .
This co... | if orbit . norbits == 1 :
return _single_orbit_find_actions ( orbit , N_max , force_harmonic_oscillator = force_harmonic_oscillator , toy_potential = toy_potential )
else :
norbits = orbit . norbits
actions = np . zeros ( ( 3 , norbits ) )
angles = np . zeros ( ( 3 , norbits ) )
freqs = np . zeros (... |
def add_geo_facet(self, *args, **kwargs):
    """Add a geo distance facet.

    All arguments are forwarded unchanged to the GeoDistanceFacet
    constructor; the resulting facet is appended to this query's facets.
    """
    self.facets.append(GeoDistanceFacet(*args, **kwargs))
def adaptive_rejection_sampling ( logpdf : callable , a : float , b : float , domain : Tuple [ float , float ] , n_samples : int , random_stream = None ) :
"""Adaptive rejection sampling samples exactly ( all samples are i . i . d ) and efficiently from any univariate log - concave distribution . The basic idea is ... | assert ( hasattr ( logpdf , "__call__" ) )
assert ( len ( domain ) == 2 ) , "Domain must be two-element iterable."
assert ( domain [ 1 ] >= domain [ 0 ] ) , "Invalid domain, it must hold: domain[1] >= domain[0]."
assert ( n_samples >= 0 ) , "Number of samples must be >= 0."
if random_stream is None :
random_stream ... |
def is_all_field_none ( self ) :
""": rtype : bool""" | if self . _id_ is not None :
return False
if self . _description is not None :
return False
if self . _ean_code is not None :
return False
if self . _avatar_attachment is not None :
return False
if self . _tab_attachment is not None :
return False
if self . _quantity is not None :
return False
i... |
def delete(self, ids):
    """Method to delete environments vip by their id's.

    :param ids: Identifiers of environments vip
    :return: None
    """
    # Build one bulk URI containing every requested id, then delegate
    # the actual HTTP DELETE to the base API client.
    url = build_uri_with_ids('api/v3/environment-vip/%s/', ids)
    return super(ApiEnvironmentVip, self).delete(url)
def get(self, node_id):
    """Args:
        node_id: Returns an Entry instance for the given node ID.
            If the requested node ID does not exist, throws KeyError.
    """
    # Entries not refreshed within TIMEOUT are treated as offline:
    # notify handlers and evict, so the lookup below raises KeyError
    # exactly as for a node that was never registered.
    if (self._registry[node_id].monotonic_timestamp + self.TIMEOUT) < time.monotonic():
        self._call_event_handlers(self.UpdateEvent(self._registry[node_id], self.UpdateEvent.EVENT_ID_OFFLINE))
        del self._registry[node_id]
    return self._registry[node_id]
def lift_chart ( df , col_true = None , col_pred = None , col_scores = None , pos_label = 1 ) :
r"""Compute life value , true positive rate ( TPR ) and threshold from predicted DataFrame .
Note that this method will trigger the defined flow to execute .
: param df : predicted data frame
: type df : DataFrame ... | if not col_pred :
col_pred = get_field_name_by_role ( df , FieldRole . PREDICTED_CLASS )
if not col_scores :
col_scores = get_field_name_by_role ( df , FieldRole . PREDICTED_SCORE )
thresh , tp , fn , tn , fp = _run_roc_node ( df , pos_label , col_true , col_pred , col_scores )
depth = ( tp + fp ) * 1.0 / ( tp ... |
def quoted_or_list(items: List[str]) -> Optional[str]:
    """Given [A, B, C] return "'A', 'B', or 'C'".

    Note: We use single quotes here, since these are also used by repr().
    """
    quoted = [f"'{item}'" for item in items]
    return or_list(quoted)
def autofix ( W , copy = True ) :
'''Fix a bunch of common problems . More specifically , remove Inf and NaN ,
ensure exact binariness and symmetry ( i . e . remove floating point
instability ) , and zero diagonal .
Parameters
W : np . ndarray
weighted connectivity matrix
copy : bool
if True , returns... | if copy :
W = W . copy ( )
# zero diagonal
np . fill_diagonal ( W , 0 )
# remove np . inf and np . nan
W [ np . logical_or ( np . where ( np . isinf ( W ) ) , np . where ( np . isnan ( W ) ) ) ] = 0
# ensure exact binarity
u = np . unique ( W )
if np . all ( np . logical_or ( np . abs ( u ) < 1e-8 , np . abs ( u - ... |
def get_contents(self, path, ref=github.GithubObject.NotSet):
    """:calls: `GET /repos/:owner/:repo/contents/:path <http://developer.github.com/v3/repos/contents>`_

    :param path: string
    :param ref: string
    :rtype: :class:`github.ContentFile.ContentFile`
    """
    # Thin alias kept for API compatibility; the real work happens in
    # get_file_contents.
    return self.get_file_contents(path, ref)
def send_script_async ( self , conn_id , data , progress_callback , callback ) :
"""Asynchronously send a a script to this IOTile device
Args :
conn _ id ( int ) : A unique identifer that will refer to this connection
data ( string ) : the script to send to the device
progress _ callback ( callable ) : A fu... | try :
context = self . conns . get_context ( conn_id )
except ArgumentError :
callback ( conn_id , self . id , False , "Could not find connection information" )
return
topics = context [ 'topics' ]
context [ 'progress_callback' ] = progress_callback
self . conns . begin_operation ( conn_id , 'script' , call... |
def all_dbs(self):
    """Retrieves a list of all database names for the current client.

    :returns: List of database names for the client
    """
    endpoint = '/'.join((self.server_url, '_all_dbs'))
    response = self.r_session.get(endpoint)
    response.raise_for_status()
    return response_to_json_dict(response)
def get_match_history ( start_at_match_id = None , player_name = None , hero_id = None , skill = 0 , date_min = None , date_max = None , account_id = None , league_id = None , matches_requested = None , game_mode = None , min_players = None , tournament_games_only = None , ** kwargs ) :
"""List of most recent 25 ma... | params = { "start_at_match_id" : start_at_match_id , "player_name" : player_name , "hero_id" : hero_id , "skill" : skill , "date_min" : date_min , "date_max" : date_max , "account_id" : account_id , "league_id" : league_id , "matches_requested" : matches_requested , "game_mode" : game_mode , "min_players" : min_players... |
def _VerifyMethodCall ( self ) :
"""Verify the called method is expected .
This can be an ordered method , or part of an unordered set .
Returns :
The expected mock method .
Raises :
UnexpectedMethodCall if the method called was not expected .""" | expected = self . _PopNextMethod ( )
# Loop here , because we might have a MethodGroup followed by another
# group .
while isinstance ( expected , MethodGroup ) :
expected , method = expected . MethodCalled ( self )
if method is not None :
return method
# This is a mock method , so just check equality .... |
def check_ndk_api ( ndk_api , android_api ) :
"""Warn if the user ' s NDK is too high or low .""" | if ndk_api > android_api :
raise BuildInterruptingException ( 'Target NDK API is {}, higher than the target Android API {}.' . format ( ndk_api , android_api ) , instructions = ( 'The NDK API is a minimum supported API number and must be lower ' 'than the target Android API' ) )
if ndk_api < MIN_NDK_API :
warni... |
def delete(self):
    """If a dynamic version, delete it the standard way and remove it from the
    inventory, else delete all dynamic versions."""
    if self.dynamic_version_of is None:
        # Not a dynamic version itself: cascade over all of this field's
        # dynamic versions (presumably _delete_dynamic_versions also
        # handles the field's own cleanup -- TODO confirm).
        self._delete_dynamic_versions()
    else:
        # This is a dynamic version: delete normally, then drop it from
        # the inventory set of dynamic parts.
        super(DynamicFieldMixin, self).delete()
        self._inventory.srem(self.dynamic_part)
def grid ( children = [ ] , sizing_mode = None , nrows = None , ncols = None ) :
"""Conveniently create a grid of layoutable objects .
Grids are created by using ` ` GridBox ` ` model . This gives the most control over
the layout of a grid , but is also tedious and may result in unreadable code in
practical a... | row = namedtuple ( "row" , [ "children" ] )
col = namedtuple ( "col" , [ "children" ] )
def flatten ( layout ) :
Item = namedtuple ( "Item" , [ "layout" , "r0" , "c0" , "r1" , "c1" ] )
Grid = namedtuple ( "Grid" , [ "nrows" , "ncols" , "items" ] )
def gcd ( a , b ) :
a , b = abs ( a ) , abs ( b )
... |
def save_xml(self, doc, element):
    '''Save this message_sending object into an xml.dom.Element object.

    :param doc: owning xml.dom Document, used as the node factory.
    :param element: parent element receiving the <targets> children.
    '''
    # Each target condition becomes its own namespaced <targets> child
    # carrying the rtsExt:condition_ext xsi:type.
    for cond in self._targets:
        new_element = doc.createElementNS(RTS_NS, RTS_NS_S + 'targets')
        new_element.setAttributeNS(XSI_NS, XSI_NS_S + 'type', 'rtsExt:condition_ext')
        cond.save_xml(doc, new_element)
        element.appendChild(new_element)
def get_symbol ( num_classes = 20 , nms_thresh = 0.5 , force_suppress = False , nms_topk = 400 , ** kwargs ) :
"""Single - shot multi - box detection with VGG 16 layers ConvNet
This is a modified version , with fc6 / fc7 layers replaced by conv layers
And the network is slightly smaller than original VGG 16 net... | net = get_symbol_train ( num_classes )
cls_preds = net . get_internals ( ) [ "multibox_cls_pred_output" ]
loc_preds = net . get_internals ( ) [ "multibox_loc_pred_output" ]
anchor_boxes = net . get_internals ( ) [ "multibox_anchors_output" ]
cls_prob = mx . symbol . softmax ( data = cls_preds , axis = 1 , name = 'cls_p... |
def _iterate_fields_cond(self, pkt, val, use_val):
    """Internal function used by _find_fld_pkt & _find_fld_pkt_val"""
    # Each entry pairs a field with either a single packet predicate or
    # a (pkt_cond, pkt_val_cond) tuple; return the first matching field.
    for candidate, predicate in self.flds:
        if isinstance(predicate, tuple):
            if use_val:
                # Value-aware lookup: only the (pkt, val) predicate applies.
                if predicate[1](pkt, val):
                    return candidate
                continue
            # Packet-only lookup: fall back to the packet predicate.
            predicate = predicate[0]
        if predicate(pkt):
            return candidate
    # No predicate matched: use the default field.
    return self.dflt
def _filter_commands ( ctx , commands = None ) :
"""Return list of used commands .""" | lookup = getattr ( ctx . command , 'commands' , { } )
if not lookup and isinstance ( ctx . command , click . MultiCommand ) :
lookup = _get_lazyload_commands ( ctx . command )
if commands is None :
return sorted ( lookup . values ( ) , key = lambda item : item . name )
names = [ name . strip ( ) for name in com... |
def unlink(self, req, parent, name):
    """Remove a file

    Valid replies:
        reply_err
    """
    # Read-only filesystem: always refuse removal with EROFS.
    self.reply_err(req, errno.EROFS)
def set_weights ( self , weights_values : dict , ignore_missing = False ) :
"""Sets the weights values of the network .
: param weights _ values : dictionary with weights for each layer""" | network_name = self . __class__ . __name__ . lower ( )
with tf . variable_scope ( network_name ) :
for layer_name in weights_values :
with tf . variable_scope ( layer_name , reuse = True ) :
for param_name , data in weights_values [ layer_name ] . items ( ) :
try :
... |
def merge_subtokens(doc, label="subtok"):
    """Merge subtokens into a single token.

    doc (Doc): The Doc object.
    label (unicode): The subtoken dependency label.
    RETURNS (Doc): The Doc object with merged subtokens.

    DOCS: https://spacy.io/api/pipeline-functions#merge_subtokens
    """
    # Match maximal runs of tokens carrying the subtoken dependency label.
    matcher = Matcher(doc.vocab)
    matcher.add("SUBTOK", None, [{"DEP": label, "op": "+"}])
    matched = matcher(doc)
    merge_spans = [doc[start:end + 1] for _, start, end in matched]
    # Collapse every matched span into a single token in place.
    with doc.retokenize() as retokenizer:
        for span in merge_spans:
            retokenizer.merge(span)
    return doc
def save ( filename , data , format = None , ** kwargs ) :
'''save ( filename , data ) writes the given data to the given filename then yieds that filename .
save ( filename , data , format ) specifies that the given format should be used ; this should be the
name of the exporter ( though a file extension that ... | filename = os . path . expanduser ( os . path . expandvars ( filename ) )
if format is None :
format = guess_export_format ( filename , data , ** kwargs )
if format is None :
raise ValueError ( 'Could not deduce export format for file %s' % filename )
else :
format = format . lower ( )
if format... |
def system_call(command):
    """Run a command through the shell and return its stdout as bytes.

    Would be better to use subprocess.check_output, but this works on 2.6,
    which is still the system Python on CentOS 7.

    :param command: shell command string to execute
    :return: the command's standard output (bytes)
    """
    p = subprocess.Popen([command], stdout=subprocess.PIPE, shell=True)
    # communicate() reads all output AND reaps the child; reading
    # p.stdout directly (as before) never wait()s and leaves a zombie.
    out, _ = p.communicate()
    return out
def _make_tasks_unique(tasks):
    """If some tasks of the workflow are the same they are deep copied.

    :param tasks: iterable of task objects
    :return: list of the same length where any repeated occurrence of an
        already-seen task object is replaced by a deep copy
    """
    unique_tasks = []
    seen_ids = set()
    for task in tasks:
        # Bug fix: the seen-set was never populated before, so duplicates
        # were never detected and nothing was ever copied. Track object
        # identity so equal-but-distinct tasks are not copied needlessly.
        if id(task) in seen_ids:
            task = copy.deepcopy(task)
        seen_ids.add(id(task))
        unique_tasks.append(task)
    return unique_tasks
def get_file_hash ( storage , path ) :
"""Create md5 hash from file contents .""" | contents = storage . open ( path ) . read ( )
file_hash = hashlib . md5 ( contents ) . hexdigest ( )
# Check if content should be gzipped and hash gzipped content
content_type = mimetypes . guess_type ( path ) [ 0 ] or 'application/octet-stream'
if settings . is_gzipped and content_type in settings . gzip_content_types... |
def port ( self , container , private_port ) :
"""Lookup the public - facing port that is NAT - ed to ` ` private _ port ` ` .
Identical to the ` ` docker port ` ` command .
Args :
container ( str ) : The container to look up
private _ port ( int ) : The private port to inspect
Returns :
( list of dict ... | res = self . _get ( self . _url ( "/containers/{0}/json" , container ) )
self . _raise_for_status ( res )
json_ = res . json ( )
private_port = str ( private_port )
h_ports = None
# Port settings is None when the container is running with
# network _ mode = host .
port_settings = json_ . get ( 'NetworkSettings' , { } )... |
def list_kadastrale_afdelingen ( self ) :
'''List all ` kadastrale afdelingen ` in Flanders .
: param integer sort : Field to sort on .
: rtype : A : class : ` list ` of : class : ` Afdeling ` .''' | def creator ( ) :
gemeentes = self . list_gemeenten ( )
res = [ ]
for g in gemeentes :
res += self . list_kadastrale_afdelingen_by_gemeente ( g )
return res
if self . caches [ 'permanent' ] . is_configured :
key = 'list_afdelingen_rest'
afdelingen = self . caches [ 'permanent' ] . get_or... |
def get_remote(self, key, default=None, scope=None):
    """Get data from the remote end(s) of the :class:`Conversation` with the given scope.

    In Python, this is equivalent to::

        relation.conversation(scope).get_remote(key, default)

    See :meth:`conversation` and :meth:`Conversation.get_remote`.
    """
    conversation = self.conversation(scope)
    return conversation.get_remote(key, default)
def generic_visit ( self , node ) :
"""Implement generic node .""" | # [ [ [ cog
# cog . out ( " print ( pcolor ( ' Enter generic visitor ' , ' magenta ' ) ) " )
# [ [ [ end ] ] ]
# A generic visitor that potentially closes callables is needed to
# close enclosed callables that are not at the end of the enclosing
# callable , otherwise the ending line of the enclosed callable would
# be... |
def all_slots ( self , cls : CLASS_OR_CLASSNAME , * , cls_slots_first : bool = False ) -> List [ SlotDefinition ] :
"""Return all slots that are part of the class definition . This includes all is _ a , mixin and apply _ to slots
but does NOT include slot _ usage targets . If class B has a slot _ usage entry for ... | def merge_definitions ( cls_name : Optional [ ClassDefinitionName ] ) -> None :
if cls_name :
for slot in self . all_slots ( cls_name ) :
aliased_name = self . aliased_slot_name ( slot )
if aliased_name not in known_slots :
known_slots . add ( aliased_name )
... |
def merge_options_to_dict(options):
    """Given a collection of Option objects or partial option dictionaries,
    merge everything to a single dictionary.
    """
    merged = {}
    for entry in options:
        # Dictionaries are taken as-is; Option objects contribute a
        # single {key: kwargs} mapping.
        partial = entry if isinstance(entry, dict) else {entry.key: entry.kwargs}
        merged = merge_option_dicts(merged, partial)
    return merged
def _add_ticks ( xticks = True , yticks = True ) :
"""NAME :
_ add _ ticks
PURPOSE :
add minor axis ticks to a plot
INPUT :
( none ; works on the current axes )
OUTPUT :
( none ; works on the current axes )
HISTORY :
2009-12-23 - Written - Bovy ( NYU )""" | ax = pyplot . gca ( )
if xticks :
xstep = ax . xaxis . get_majorticklocs ( )
xstep = xstep [ 1 ] - xstep [ 0 ]
ax . xaxis . set_minor_locator ( ticker . MultipleLocator ( xstep / 5. ) )
if yticks :
ystep = ax . yaxis . get_majorticklocs ( )
ystep = ystep [ 1 ] - ystep [ 0 ]
ax . yaxis . set_mino... |
def shannon_entropy(data, iterator):
    """Borrowed from http://blog.dkbza.org/2007/05/scanning-data-for-entropy-anomalies.html"""
    if not data:
        return 0
    total = len(data)
    entropy = 0
    for symbol in iterator:
        # Relative frequency of this symbol within the data.
        freq = float(data.count(symbol)) / total
        if freq > 0:
            entropy -= freq * math.log(freq, 2)
    return entropy
def convert_weights ( wgts : Weights , stoi_wgts : Dict [ str , int ] , itos_new : Collection [ str ] ) -> Weights :
"Convert the model ` wgts ` to go with a new vocabulary ." | dec_bias , enc_wgts = wgts . get ( '1.decoder.bias' , None ) , wgts [ '0.encoder.weight' ]
wgts_m = enc_wgts . mean ( 0 )
if dec_bias is not None :
bias_m = dec_bias . mean ( 0 )
new_w = enc_wgts . new_zeros ( ( len ( itos_new ) , enc_wgts . size ( 1 ) ) ) . zero_ ( )
if dec_bias is not None :
new_b = dec_bias ... |
def SetConsoleTextAttribute(stream_id, attrs):
    """Set a console text attribute."""
    # Resolve the cached win32 handle for this stream and delegate to the
    # kernel32 API of the same name.
    return windll.kernel32.SetConsoleTextAttribute(handles[stream_id], attrs)
def initial_finall_mass_relation ( self , marker = 'o' , linestyle = '--' ) :
'''INtiial to final mass relation''' | final_m = [ ]
ini_m = [ ]
for i in range ( len ( self . runs_H5_surf ) ) :
sefiles = se ( self . runs_H5_out [ i ] )
ini_m . append ( sefiles . get ( "mini" ) )
h1 = sefiles . get ( int ( sefiles . se . cycles [ - 2 ] ) , 'H-1' )
mass = sefiles . get ( int ( sefiles . se . cycles [ - 2 ] ) , 'mass' )
... |
def set_vars ( env ) :
"""Set MWCW _ VERSION , MWCW _ VERSIONS , and some codewarrior environment vars
MWCW _ VERSIONS is set to a list of objects representing installed versions
MWCW _ VERSION is set to the version object that will be used for building .
MWCW _ VERSION can be set to a string during Environme... | desired = env . get ( 'MWCW_VERSION' , '' )
# return right away if the variables are already set
if isinstance ( desired , MWVersion ) :
return 1
elif desired is None :
return 0
versions = find_versions ( )
version = None
if desired :
for v in versions :
if str ( v ) == desired :
version... |
def conformPadding(cls, chars):
    """Ensure alternate input padding formats are conformed
    to formats defined in PAD_MAP.

    If chars is already a format defined in PAD_MAP, then
    it is returned unmodified.

    Example::

        '%04d' -> '#'

    Args:
        chars (str): input padding chars

    Returns:
        str: conformed padding chars
    """
    # Empty input, or input already using a PAD_MAP character, passes
    # through untouched; anything else is converted via the pad width.
    if not chars or chars[0] in PAD_MAP:
        return chars
    return cls.getPaddingChars(cls.getPaddingNum(chars))
def uncamel(name):
    """converts camelcase to underscore

    >>> uncamel('fooBar')
    'foo_bar'
    >>> uncamel('FooBar')
    'foo_bar'
    >>> uncamel('_fooBar')
    '_foo_bar'
    >>> uncamel('_FooBar')
    '__foo_bar'
    """
    if not name:
        # Previously raised IndexError on ''; an empty name has no case to
        # convert, so return it unchanged.
        return name
    # Build pieces in a list and join once instead of repeated string
    # concatenation (which is quadratic in the worst case).
    parts = [name[0].lower()]
    for ch in name[1:]:
        if ch.isupper():
            parts.append('_' + ch.lower())
        else:
            parts.append(ch)
    return ''.join(parts)
def read_str(delim=',', *lines):
    """This function is similar to read_csv, but it reads data from the
    list of <lines>.

    fd = open("foo", "r")
    data = chart_data.read_str(",", fd.readlines())
    """
    # Parse each supplied line with the given delimiter and collect results.
    return [parse_line(line, delim) for line in lines]
def changes(self):
    """Dumber version of 'patch' method"""
    # Deprecated entry point; warn at the caller's frame.
    warnings.warn(
        'Model.changes will be removed in warlock v2',
        DeprecationWarning,
        stacklevel=2,
    )
    # Deep copy so callers cannot mutate the tracked change set.
    return copy.deepcopy(self.__dict__['changes'])
def write_pascal_results ( self , all_boxes ) :
"""write results files in pascal devkit path
Parameters :
all _ boxes : list
boxes to be processed [ bbox , confidence ]
Returns :
None""" | for cls_ind , cls in enumerate ( self . classes ) :
print ( 'Writing {} VOC results file' . format ( cls ) )
filename = self . get_result_file_template ( ) . format ( cls )
with open ( filename , 'wt' ) as f :
for im_ind , index in enumerate ( self . image_set_index ) :
dets = all_boxes ... |
def apci_contents ( self , use_dict = None , as_class = dict ) :
"""Return the contents of an object as a dict .""" | if _debug :
APCI . _debug ( "apci_contents use_dict=%r as_class=%r" , use_dict , as_class )
# make / extend the dictionary of content
if use_dict is None :
use_dict = as_class ( )
# copy the source and destination to make it easier to search
if self . pduSource :
use_dict . __setitem__ ( 'source' , str ( se... |
def parse_nni_function(code):
    """Parse `nni.function_choice` expression.
    Return the AST node of annotated expression and a list of dumped function call expressions.
    code: annotation string
    """
    name, call = parse_annotation_function(code, 'function_choice')
    # Dump each candidate function expression before the call is rewritten.
    funcs = []
    for func in call.args:
        funcs.append(ast.dump(func, False))
    convert_args_to_dict(call, with_lambda=True)
    # Replace the name keyword with the source text of the annotated name.
    name_str = astor.to_source(name).strip()
    call.keywords[0].value = ast.Str(s=name_str)
    return call, funcs
def gen_hot_url ( hot_index , page = 1 ) :
"""拼接 首页热门文章 URL
Parameters
hot _ index : WechatSogouConst . hot _ index
首页热门文章的分类 ( 常量 ) : WechatSogouConst . hot _ index . xxx
page : int
页数
Returns
str
热门文章分类的url""" | assert hasattr ( WechatSogouConst . hot_index , hot_index )
assert isinstance ( page , int ) and page > 0
index_urls = { WechatSogouConst . hot_index . hot : 0 , # 热门
WechatSogouConst . hot_index . gaoxiao : 1 , # 搞笑
WechatSogouConst . hot_index . health : 2 , # 养生
WechatSogouConst . hot_index . sifanghua : 3 , # 私房话
W... |
def get_objective_objective_bank_session ( self , proxy ) :
"""Gets the session for retrieving objective to objective bank mappings .
: param proxy : a proxy
: type proxy : ` ` osid . proxy . Proxy ` `
: return : an ` ` ObjectiveObjectiveBankSession ` `
: rtype : ` ` osid . learning . ObjectiveObjectiveBank... | if not self . supports_objective_objective_bank ( ) :
raise Unimplemented ( )
try :
from . import sessions
except ImportError :
raise OperationFailed ( )
proxy = self . _convert_proxy ( proxy )
try :
session = sessions . ObjectiveObjectiveBankSession ( proxy = proxy , runtime = self . _runtime )
except ... |
def configure_widget_for_editing ( self , widget ) :
"""A widget have to be added to the editor , it is configured here in order to be conformant
to the editor""" | if not 'editor_varname' in widget . attributes :
return
widget . onclick . do ( self . on_widget_selection )
# setup of the on _ dropped function of the widget in order to manage the dragNdrop
widget . __class__ . on_dropped = on_dropped
# drag properties
# widget . style [ ' resize ' ] = ' both '
widget . style [ ... |
def suck_out_editions ( reporters ) :
"""Builds a dictionary mapping edition keys to their root name .
The dictionary takes the form of :
" A . " : " A . " ,
" A . 2d " : " A . " ,
" A . 3d " : " A . " ,
" A . D . " : " A . D . " ,
In other words , this lets you go from an edition match to its parent ke... | editions_out = { }
for reporter_key , data_list in reporters . items ( ) : # For each reporter key . . .
for data in data_list : # For each book it maps to . . .
for edition_key , edition_value in data [ "editions" ] . items ( ) :
try :
editions_out [ edition_key ]
ex... |
def key_pair_name(i, region, project_id, ssh_user):
    """Returns the ith default gcp_key_pair_name.

    The index ``i`` was previously passed to ``str.format`` but silently
    ignored (the template had only four placeholders), so every call
    produced the same name; ``i`` is now included so names actually vary
    with the index.
    """
    return "{}_gcp_{}_{}_{}_{}".format(RAY, region, project_id, ssh_user, i)
def _op_generic_StoU_saturation(self, value, min_value, max_value):  # pylint:disable=no-self-use
    """Return unsigned saturated BV from signed BV.
    Min and max value should be unsigned.
    """
    # Clamp from below first, then from above; signed comparisons are used
    # because the input bitvector is signed.
    clamped_low = claripy.If(claripy.SLT(value, min_value), min_value, value)
    return claripy.If(claripy.SGT(value, max_value), max_value, clamped_low)
def extend ( self , key , values , * , section = DataStoreDocumentSection . Data ) :
"""Extends a list in the data store with the elements of values .
Args :
key ( str ) : The key pointing to the value that should be stored / updated .
It supports MongoDB ' s dot notation for nested fields .
values ( list )... | key_notation = '.' . join ( [ section , key ] )
if not isinstance ( values , list ) :
return False
result = self . _collection . update_one ( { "_id" : ObjectId ( self . _workflow_id ) } , { "$push" : { key_notation : { "$each" : self . _encode_value ( values ) } } , "$currentDate" : { "lastModified" : True } } )
r... |
def get_connections(self, data=True):
    """Return connections from all the agents in the environment.

    :param bool data:
        If ``True`` return also the dictionary associated with each
        connection
    :returns:
        A list of ``(addr, connections)``-tuples, one per agent.
    """
    return [
        (agent.addr, agent.get_connections(data=data))
        for agent in self.get_agents(addr=False)
    ]
def decrypt ( keyfile_json , password ) :
'''Decrypts a private key that was encrypted using an Ethereum client or
: meth : ` ~ Account . encrypt ` .
: param keyfile _ json : The encrypted key
: type keyfile _ json : dict or str
: param str password : The password that was used to encrypt the key
: return... | if isinstance ( keyfile_json , str ) :
keyfile = json . loads ( keyfile_json )
elif is_dict ( keyfile_json ) :
keyfile = keyfile_json
else :
raise TypeError ( "The keyfile should be supplied as a JSON string, or a dictionary." )
password_bytes = text_if_str ( to_bytes , password )
return HexBytes ( decode_k... |
def removeComments ( self , comment = None ) :
"""Inserts comments into the editor based on the current selection . If no comment string is supplied , then the comment from the language will be used .
: param comment | < str > | | None
: return < bool > | success""" | if ( not comment ) :
lang = self . language ( )
if ( lang ) :
comment = lang . lineComment ( )
if ( not comment ) :
return False
startline , startcol , endline , endcol = self . getSelection ( )
len_comment = len ( comment )
line , col = self . getCursorPosition ( )
for lineno in range ( startline ,... |
def _send_to_all_rooms(self, message):
    """Send a message to all connected rooms"""
    # Relay the message to every registered room.
    for current_room in self._rooms.values():
        current_room.send_message(message)
def dist0(n, method='lin_square'):
    """Compute standard cost matrices of size (n, n) for OT problems

    Parameters
    ----------
    n : int
        size of the cost matrix
    method : str, optional
        Type of loss matrix chosen from:

        * 'lin_square' : linear sampling between 0 and n-1, quadratic loss

    Returns
    -------
    M : np.ndarray
        the cost matrix (0 for an unrecognized method, matching the
        original fall-through behaviour)
    """
    if method == 'lin_square':
        # Column vector of n linearly spaced samples; squared distances
        # between all pairs give the quadratic-loss cost matrix.
        samples = np.arange(n, dtype=np.float64).reshape((n, 1))
        return dist(samples, samples)
    return 0
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.