Dataset Viewer (auto-converted to Parquet)

Columns:
- code: string, lengths 0 to 30.8k
- source: string, 6 distinct values
- language: string, 9 distinct values
- __index_level_0__: int64, 0 to 100k
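A minimal sketch of how one might load and inspect a dataset with this schema using the Hugging Face datasets library. The repository id below is a placeholder (it is not given above); the column names follow the schema listed here, and the lowercase language labels match the rows that follow.

from datasets import load_dataset

# Placeholder repository id -- substitute the actual dataset path.
ds = load_dataset("user/code-snippets", split="train")

print(ds.column_names)  # expected: ['code', 'source', 'language', '__index_level_0__']

# Keep only the Python rows, then look at the first snippet.
python_rows = ds.filter(lambda row: row["language"] == "python")
print(python_rows[0]["code"])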
saveExisting(event) { event.preventDefault(); this.props.saveLoopThunk(this.props.sounds, this.props.loopId); toast('Loop Saved!', { position: 'bottom-right', autoClose: 2000 }); }
function
javascript
0
public boolean isModletIncluded( final Modlet modlet ) { if ( modlet == null ) { throw new NullPointerException( "modlet" ); } for ( final NameType include : this.getModletIncludes() ) { if ( include.getName().equals( modlet.getName() ) ) { return true; } } return this.getModletIncludes().isEmpty(); }
function
java
1
public static string CreateMessageForInaccessibleType(Type inaccessibleType, Type typeToProxy) { var targetAssembly = typeToProxy.Assembly; string inaccessibleTypeDescription = inaccessibleType == typeToProxy ? "it" : "type " + inaccessibleType.GetBestName(); var messageFormat = "Can not create proxy for type {0} because {1} is not accessible. "; var message = string.Format(messageFormat, typeToProxy.GetBestName(), inaccessibleTypeDescription); var instructions = CreateInstructionsToMakeVisible(targetAssembly); return message + instructions; }
function
c#
2
def app(): app = create_app() db_fd, db_filepath = tempfile.mkstemp(suffix=".sqlite3") app.config["DATABASE"] = { "name": db_filepath, "engine": "peewee.SqliteDatabase", } db.init_app(app) with db.database.bind_ctx(MODELS): db.database.create_tables(MODELS) db.close_db(None) with app.app_context(): yield app db.close_db(None) os.close(db_fd) os.remove(db_filepath)
function
python
3
public class ArmstrongNumber { public static void main(String[] args) { Integer givenNumber = 153; Boolean isArmstrongNumber = isArmstrongNumber(givenNumber); if (isArmstrongNumber) { System.out.println(givenNumber + " " + "is an Armstrong number."); } else { System.out.println(givenNumber + " " + "is not an Armstrong number."); } } public static Boolean isArmstrongNumber(Integer givenNumber) { Boolean isArmstrongNumber = false; Integer endValue = 0, number = givenNumber; if (number == null || number < 0) { return false; } //calculating if given number is an Armstrong number while (number > 0) { endValue += (number % 10) * (number % 10) * (number % 10); number = number / 10; } if (endValue.equals(givenNumber)) { isArmstrongNumber = true; } return isArmstrongNumber; } }
class
java
4
private IEnumerator<ITask> LightsControlLoop() { int i = 0; List<pololumaestro.ChannelValuePair> channelValues = new List<pololumaestro.ChannelValuePair>(); while (!_state.Dropping) { switch (lightsTestMode) { case 0: for (byte j = 0; j < ServoChannelMap.channelsCount; j++) { if (!ServoChannelMap.notLightChannels.Contains(j)) { bool lightOn = lights[j]; if (lightOn && !_lightsHelper.IsOn(j) || !lightOn && !_lightsHelper.IsOff(j)) { ushort target = (ushort)((lightOn ? 2000 : 1000) << 2); channelValues.Add(new pololumaestro.ChannelValuePair() { Channel = j, Target = target }); } } } break; case 1: { ushort target = (ushort)(((i % 2 == 0) ? 2000 : 1000) << 2); for (byte j = 0; j < ServoChannelMap.channelsCount; j++) { if (!ServoChannelMap.notLightChannels.Contains(j)) { channelValues.Add(new pololumaestro.ChannelValuePair() { Channel = j, Target = target }); } } } break; } if (channelValues.Any()) { LightsSet(channelValues); channelValues = new List<pololumaestro.ChannelValuePair>(); } i++; yield return TimeoutPort(LightsControlWaitIntervalMs).Receive(); } }
function
c#
5
private XDataFrameGroupingCols<R,C> combine(XDataFrameGroupingCols<R,C> other) { other.groupKeysMap.forEach((groupKey1, groupKeys1) -> { final Array<C> groupKeys2 = this.groupKeysMap.get(groupKey1); if (groupKeys2 == null) { this.groupKeysMap.put(groupKey1, groupKeys1); } else { final Class<C> type = groupKeys1.type(); final int length = groupKeys2.length(); final int newLength = length + groupKeys1.length(); final Array<C> combinedKeys = Array.of(type, newLength); combinedKeys.update(0, groupKeys2, 0, groupKeys2.length()); combinedKeys.update(length, groupKeys1, 0, groupKeys1.length()); this.groupKeysMap.put(groupKey1, combinedKeys); } }); return this; }
function
java
6
public IEnumerable<TModel> Create<TModel>(ICollection<TModel> itemsToCreate) where TModel : EndpointModelBase { string requestXml = ModelSerializer.Serialize(itemsToCreate); string responseXml = _proxy.CreateElements(typeof(TModel).Name, requestXml); Response response = ModelSerializer.DeserializeTo<Response>(responseXml); return response.GetTypedProperty<TModel>(); }
function
c#
7
public void AddBlueprint(string blueprintId, Blueprint blueprint) { if (blueprintId == null) { throw new ArgumentNullException("blueprintId", "No blueprint id provided."); } if (string.IsNullOrEmpty(blueprintId)) { throw new ArgumentException("No blueprint id provided.", "blueprintId"); } if (blueprint == null) { throw new ArgumentNullException("blueprint", "No blueprint provided."); } if (this.ContainsBlueprint(blueprintId)) { throw new ArgumentException( string.Format("A blueprint with this id already exists: {0}", blueprintId), "blueprintId"); } this.blueprints.Add(blueprintId, blueprint); this.OnBlueprintsChanged(); }
function
c#
8
@Override public void run(ApplicationArguments args) throws Exception { System.out.println("Beans:"); Arrays.stream(appContext.getBeanDefinitionNames()).forEach(System.out::println); System.out.println("==================="); System.out.println("Demonstrate how you can use the applicationContext directly to retrieve beans also."); System.out.println("HelloService: " + appContext.getBean("helloService", HelloService.class).sayHello("Steven")); }
function
java
9
def add(self, other): self._previous = self._current.copy() if isinstance(other, set): self._current.update(other) else: if other in self._current: self._current.add('GROUP') else: if len(self._current) != 0: self._current = {'MIXED'} else: self._current = {other}
function
python
10
char *rfctimestamp(u32 stamp) { time_t atime = stamp; struct tm tmbuf, *tp; if (IS_INVALID_TIME_T(atime)) { gpg_err_set_errno(EINVAL); return NULL; } tp = gnupg_gmtime(&atime, &tmbuf); if (!tp) return NULL; return xtryasprintf( "%.3s, %02d %.3s %04d %02d:%02d:%02d +0000", &"SunMonTueWedThuFriSat"[(tp->tm_wday % 7) * 3], tp->tm_mday, &"JanFebMarAprMayJunJulAugSepOctNovDec"[(tp->tm_mon % 12) * 3], tp->tm_year + 1900, tp->tm_hour, tp->tm_min, tp->tm_sec); }
function
c++
11
static int btreeBtSharedGet( BtShared **ppBt, const char *fullname, sqlite3 *db, int vfsFlags) { Btree *pExisting; BtShared *next_bt; int iDb; char *fullBuf[BT_MAX_PATH]; #ifdef SQLITE_DEBUG sqlite3_mutex *mutexOpen = sqlite3MutexAlloc(SQLITE_MUTEX_STATIC_OPEN); assert(sqlite3_mutex_held(mutexOpen)); #endif *ppBt = NULL; for (next_bt = g_btshared_list; next_bt != NULL; next_bt = next_bt->pNextBtS) { assert(next_bt->nRef > 0); if (strcmp(fullname, next_bt->full_name) == 0) { if (vfsFlags & SQLITE_OPEN_SHAREDCACHE) { for (iDb = db->nDb - 1; iDb >= 0; iDb--) { pExisting = db->aDb[iDb].pBt; if (pExisting && pExisting->pBt == next_bt) return SQLITE_CONSTRAINT; } } *ppBt = next_bt; sqlite3_mutex_enter(next_bt->mutex); next_bt->nRef++; sqlite3_mutex_leave(next_bt->mutex); break; } } return SQLITE_OK; }
function
c
12
public class ValidateVObject implements Validate<Object, VObject> { public boolean validate(final AnnotationMetaData annotationMetaData, Object value, VObject annotation, ViolationInfoHandler violationInfoHandler) { if (value == null) { if (annotation.mandatory()) { violationInfoHandler.addMessageForNullPointer(annotationMetaData, annotation.message()); return false; } return true; } final BeanValidator beanValidator = BeanBeanValidatorImpl.getInstance(); final List<ViolationInfo<Object>> violationInfos = beanValidator.validate(value); if (violationInfos.size() > 0) { for (final ViolationInfo<Object> violationInfo : violationInfos) { violationInfoHandler.addMessage(violationInfo.getMethodName(), violationInfo.getMessage(), violationInfo.getInvalidValue()); } return false; } return true; } }
class
java
13
async def rutrivia_leaderboard_global( self, ctx: commands.Context, sort_by: str = "wins", top: int = 10 ): key = self._get_sort_key(sort_by) if key is None: await ctx.send( _( "Unknown field `{field_name}`, see `{prefix}help rutrivia leaderboard server` " "for valid fields to sort by." ).format(field_name=sort_by, prefix=ctx.clean_prefix) ) return data = await self.config.all_members() collated_data = {} for guild_id, guild_data in data.items(): guild = ctx.bot.get_guild(guild_id) if guild is None: continue for member_id, member_data in guild_data.items(): member = guild.get_member(member_id) if member is None: continue collated_member_data = collated_data.get(member, Counter()) for v_key, value in member_data.items(): collated_member_data[v_key] += value collated_data[member] = collated_member_data await self.send_leaderboard(ctx, collated_data, key, top)
function
python
14
async function buyStarByToken() { const {lookUptokenIdToStarInfo, buyStar} = props.instance.methods; const tokenId = parseInt(tokenHash.value, 16); if (isNaN(tokenId)) { alertMsg({msg: `${tokenHash.value} is not a valid ID`, variant: 'warning'}); } else { try { const starInfo = await lookUptokenIdToStarInfo(tokenId).call(); if (starInfo) { console.log('Using account', props.account); await buyStar(tokenId).send({from: props.account, gas: 500000}); alertMsg({msg: `✅ ${starInfo} has been put up for sale`, variant: 'success'}); } else { alertMsg({msg: `${tokenHash.value} doesn't exist`, variant: 'warning'}); } } catch (e) { console.error('putStarUpForSale failed', e); alertMsg({ msg: "Unexpected error. Hint: If using development network you may need to 'migrate --reset' the contract in truffle 😊", variant: "primary" }) } } }
function
javascript
15
private Node box(Node node, int modifier) { if (node.getModifier() == Node.ONE_OF || node.getModifier() == Node.ONE_TERMINAL_OF) { return nodeCreator.oneNode(listOf(node)).clone(modifier); } if (node.getModifier() == Node.ZERO_OR_MORE) { return node; } return node.clone(modifier); }
function
java
16
def generate_slack_report(self): _report = "" _report = _report + self.generate_header() + "\n" _report = _report + self.generate_metadata() + "\n" _report = _report + self.generate_summary() + "\n" _full_body = self.generate_body_full() if (len(_report) + len(_full_body)) < 4000: _report = _report + _full_body else: _short_body = self.generate_body_short() if (len(_report) + len(_short_body)) < 4000: _report = _report + _short_body return _report
function
python
17
internal void DerivePhysicsTransformation(out Matrix derivedTransformation) { Matrix rotation; Vector3 translation; Vector3 scale; Entity.Transform.WorldMatrix.Decompose(out scale, out rotation, out translation); var translationMatrix = Matrix.Translation(translation); Matrix.Multiply(ref rotation, ref translationMatrix, out derivedTransformation); if (CanScaleShape) { if (ColliderShape.Scaling != scale) { ColliderShape.Scaling = scale; } } if (ColliderShape.LocalOffset != Vector3.Zero || ColliderShape.LocalRotation != Quaternion.Identity) { derivedTransformation = Matrix.Multiply(ColliderShape.PositiveCenterMatrix, derivedTransformation); } if (DebugEntity == null) return; derivedTransformation.Decompose(out scale, out rotation, out translation); DebugEntity.Transform.Position = translation; DebugEntity.Transform.Rotation = Quaternion.RotationMatrix(rotation); }
function
c#
18
private List<ProcessResultMessage> ValidateFHS(ReadOnlySpan<char> fhsSegment) { var errorMessages = new List<ProcessResultMessage>(); if (fhsSegment.Length >= 8) { ReadOnlySpan<char> fhsEncodingCharacters = fhsSegment.Slice(3, 5); if (fhsEncodingCharacters[0] != '|' && fhsEncodingCharacters[1] != '^' && fhsEncodingCharacters[2] != '~' && fhsEncodingCharacters[3] != '/' && fhsEncodingCharacters[4] != '&') { ProcessResultMessage errorMessage = new ProcessResultMessage() { ErrorCode = "0003", Severity = Severity.Error, Content = @"FHS segment does not contain the required encoding characters. Expected: |^~\&" }; errorMessages.Add(errorMessage); } } else { ProcessResultMessage errorMessage = new ProcessResultMessage() { ErrorCode = "0003", Severity = Severity.Error, Content = @"FHS segment is malformed" }; errorMessages.Add(errorMessage); } return errorMessages; }
function
c#
19
function buffer(func, timeBetweenCalls, bufferSize) { const bufferingOn = (bufferSize !== undefined && bufferSize > 0); const argBuffer = (bufferingOn) ? [] : undefined; const addToBuffer = (item)=>{ if (argBuffer.length >= bufferSize) { argBuffer.pop(); } argBuffer.unshift(item); }; let blocked = false; return function throttledFn(...args) { if (blocked) { if (bufferingOn) addToBuffer(args); return; } blocked = true; const onTimeout = ()=>{ blocked = false; if (bufferingOn && argBuffer.length > 0) { throttledFn.apply(this, argBuffer.pop()); } }; window.setTimeout(onTimeout, timeBetweenCalls); return func.apply(this, args); }; }
function
javascript
20
IEnumerable<System.ComponentModel.DataAnnotations.ValidationResult> IValidatableObject.Validate(ValidationContext validationContext) { if(this._Bill < (int)0) { yield return new System.ComponentModel.DataAnnotations.ValidationResult("Invalid value for _Bill, must be a value greater than or equal to 0.", new [] { "_Bill" }); } if(this.TotalSum < (decimal)0) { yield return new System.ComponentModel.DataAnnotations.ValidationResult("Invalid value for TotalSum, must be a value greater than or equal to 0.", new [] { "TotalSum" }); } yield break; }
function
c#
21
def _check_and_get_mask(logu, joint_sample_mask=None, validate_args=False): with tf.name_scope('get_default_mask'): logu = tf.convert_to_tensor( logu, dtype_hint=tf.float32, name='logu') if joint_sample_mask is None: num_rows, num_cols = tf.unstack(tf.shape(logu)[-2:]) joint_sample_mask = tf.eye(num_rows, num_cols, dtype=tf.bool) else: joint_sample_mask = tf.convert_to_tensor( joint_sample_mask, dtype_hint=tf.bool, name='joint_sample_mask') with tf.control_dependencies( _check_mask_shape(logu, joint_sample_mask) if validate_args else []): joint_sample_mask = tf.identity(joint_sample_mask) return logu, joint_sample_mask
function
python
22
[MethodImpl(MethodImplOptions.NoInlining)] private static ExitType OldOMBuildProject(ExitType exitType, string projectFile, string[] targets, string toolsVersion, Dictionary<string, string> globalProperties, ILogger[] loggers, LoggerVerbosity verbosity, bool needToValidateProject, string schemaFile, int cpuCount) { Console.WriteLine(AssemblyResources.GetString("Using35Engine")); Microsoft.Build.BuildEngine.BuildPropertyGroup oldGlobalProps = new Microsoft.Build.BuildEngine.BuildPropertyGroup(); foreach (KeyValuePair<string, string> globalProp in globalProperties) { oldGlobalProps.SetProperty(globalProp.Key, globalProp.Value); } if (!BuildProjectWithOldOM(projectFile, targets, toolsVersion, oldGlobalProps, loggers, verbosity, null, needToValidateProject, schemaFile, cpuCount)) { exitType = ExitType.BuildError; } return exitType; }
function
c#
23
def extract_narr_aux_data(espa_metadata, aux_path): logger = logging.getLogger(__name__) (dummy, t0_date, t1_date) = util.NARR.dates(espa_metadata) logger.info('Before Date = {}'.format(str(t0_date))) logger.info(' After Date = {}'.format(str(t1_date))) for aux_set in aux_filenames(aux_path, PARMS_TO_EXTRACT, t0_date, t1_date): logger.info('Using {0}'.format(aux_set.hdr)) logger.info('Using {0}'.format(aux_set.grb)) if (not os.path.exists(aux_set.hdr) or not os.path.exists(aux_set.grb)): raise Exception('Required ST AUX files are missing') extract_from_grib(aux_set)
function
python
24
public class EnvironmentNameResolver { private static volatile EnvironmentNameResolver defaultEnvironmentNameResolver = new EnvironmentNameResolver(); /** * Set default EnvironmentNameResolver instance. * * @param resolver new default resolver */ public static void setDefaultEnvironmentNameResolver(EnvironmentNameResolver resolver) { defaultEnvironmentNameResolver = resolver; } public static EnvironmentNameResolver getDefaultEnvironmentNameResolver() { return defaultEnvironmentNameResolver; } public String getEnvironmentName() { String environment = System.getProperty("config.environment"); if (environment != null && !environment.trim().isEmpty()) { return environment; } try { return InetAddress.getLocalHost().getHostName(); } catch (UnknownHostException e) { ConfigLog.warn("EnvironmentNameResolver: Unable to determine host name, using null for environment"); return null; } } }
class
java
25
def predict_sequence(self,sequence,pred,peptideList=False): if not peptideList: peptide_list = get_peptides(sequence, self.length) else: peptide_list = sequence scores = self.predict_peptide_list(peptide_list) return scores
function
python
26
public class InterceptingReduxMiddleware : IReduxMiddleware { private readonly Action<IReduxMessage, IReduxDispatcher> _dispatch; public InterceptingReduxMiddleware(Action<IReduxMessage, IReduxDispatcher> dispatch) { _dispatch = dispatch ?? throw new ArgumentNullException(nameof(dispatch)); } public void Dispatch(IReduxMessage message, IReduxDispatcher next) => _dispatch(message, next); }
class
c#
27
def _assert_internal_invariants(self, fast=True): if self._Xarr is None: assert self._Yarr is None assert self._Xview is None assert self._Yview is None else: assert self._Yarr is not None assert self._Xview is not None assert self._Yview is not None assert len(self._Xview) == self._length assert len(self._Yview) == self._length assert len(self._Xarr) == len(self._Yarr) if not fast: assert np.all(unique_rows(self.X))
function
python
28
[Export(typeof(IVsOptionalService<,>))] internal class VsOptionalService<TService, TInterface> : VsOptionalService<TInterface>, IVsOptionalService<TService, TInterface> { [ImportingConstructor] public VsOptionalService([Import(typeof(SVsServiceProvider))]IServiceProvider serviceProvider, IProjectThreadingService threadingService) : base(serviceProvider, threadingService, typeof(TService)) { } }
class
c#
29
public boolean isOwnerPassword(byte[] ownerPassword, byte[] user, byte[] owner, int permissions, byte[] id, int encRevision, int length, boolean encryptMetadata) throws IOException { if (encRevision == 6 || encRevision == 5) { byte[] truncatedOwnerPassword = truncate127(ownerPassword); byte[] oHash = new byte[32]; byte[] oValidationSalt = new byte[8]; System.arraycopy(owner, 0, oHash, 0, 32); System.arraycopy(owner, 32, oValidationSalt, 0, 8); byte[] hash; if (encRevision == 5) { hash = computeSHA256(truncatedOwnerPassword, oValidationSalt, user); } else { hash = computeHash2A(truncatedOwnerPassword, oValidationSalt, user); } return Arrays.equals(hash, oHash); } else { byte[] userPassword = getUserPassword( ownerPassword, owner, encRevision, length ); return isUserPassword( userPassword, user, owner, permissions, id, encRevision, length, encryptMetadata ); } }
function
java
30
def stategraph(automaton, fmt=None, traversal=None): graph = automaton.__graph__ source_nodes = filter(lambda n: not graph.in_edges(n), graph.nodes()) sink_nodes = filter(lambda n: not graph.out_edges(n), graph.nodes()) sources = [('[*]', node) for node in source_nodes] sinks = [(node, '[*]') for node in sink_nodes] table = get_table(automaton, traversal=traversal) return """@startuml {} {} {} @enduml""".format('\n'.join([' {} --> {}'.format(*row) for row in sources]), '\n'.join([' {} --> {} : {}'.format(*row) for row in table]), '\n'.join([' {} --> {}'.format(*row) for row in sinks]))
function
python
31
def generator(z): with tf.variable_scope("generator"): img = tf.layers.dense(z,4*4*512,activation=tf.nn.relu,use_bias=True) img = tf.layers.batch_normalization(img,axis=1,training=True) img = tf.reshape(img,[-1,4,4,512]) print(img.shape) img = tf.layers.conv2d_transpose(img,256,kernel_size=5,strides=2,activation=tf.nn.relu,padding='same') print(img.shape) img = tf.layers.batch_normalization(img,3,training=True) img = tf.layers.conv2d_transpose(img,128,kernel_size=5,strides=2,activation=tf.nn.relu,padding='same') print(img.shape) img = tf.layers.batch_normalization(img,3,training=True) img = tf.layers.conv2d_transpose(img,3,kernel_size=5,strides=2,activation=tf.nn.tanh,padding='same') print(img.shape) return img
function
python
32
HRESULT ValidateUserObject( POBJECTINFO pObjectInfo ) { NWCONN_HANDLE hConn = NULL; HRESULT hr = S_OK; DWORD dwResumeObjectID = 0xffffffff; if (pObjectInfo->NumComponents != 2) { RRETURN(E_ADS_BAD_PATHNAME); } hr = NWApiGetBinderyHandle( &hConn, pObjectInfo->ComponentArray[0] ); BAIL_ON_FAILURE(hr); hr = NWApiValidateObject( hConn, OT_USER, _wcsupr(pObjectInfo->ComponentArray[1]), &dwResumeObjectID ); BAIL_ON_FAILURE(hr); error: if (hConn) { NWApiReleaseBinderyHandle(hConn); } RRETURN(hr); }
function
c++
33
private void init(final Persistit persistit, final Class clientClass, final boolean mustBeSerializable) { _clazz = clientClass; _persistit = persistit; _serializable = Serializable.class.isAssignableFrom(clientClass); if (_serializable) { _externalizable = Externalizable.class.isAssignableFrom(clientClass); _classDescriptor = ObjectStreamClass.lookup(_clazz); } else if (mustBeSerializable) { throw new ConversionException("Not Serializable: " + clientClass.getName()); } final Class superClass = clientClass.getSuperclass(); if (superClass != null && Serializable.class.isAssignableFrom(superClass)) { ValueCoder coder = null; final CoderManager cm = _persistit.getCoderManager(); if (cm != null) { coder = cm.lookupValueCoder(superClass); } if (!(coder instanceof DefaultValueCoder)) { coder = new DefaultValueCoder(persistit, superClass); } if (coder instanceof ValueRenderer) { _superClassValueRenderer = (ValueRenderer) coder; } } }
function
java
34
def genFile(files,fName,postfix): script = open(fName,"w") firstline='dbName,simuType,nside,coadd,fieldtype,nproc\n' script.write(firstline) r = [] for fi in files: dbName = fi.split('/')[-1].split('.db')[0] r.append(len(dbName)) for fi in files: dbName = fi.split('/')[-1].split('.db')[0] line='{},{}\n'.format(dbName,postfix) print(line) script.write(line) script.close()
function
python
35
protected virtual void Dispose( bool disposing ) { if ( disposed ) { return; } disposed = true; if ( !disposing ) { return; } lock ( syncRoot ) { foreach ( var stream in streams ) { stream.Dispose(); } streams.Clear(); } }
function
c#
36
def _drop_obsolete_columns(self, df_season: pd.DataFrame) -> pd.DataFrame: if self._mode == "time_series": df_season = df_season.drop("month", axis=1) elif self._mode in ["climatology", "departures"]: df_season = df_season.drop(["year", "month"], axis=1) else: raise ValueError( "Unable to drop columns in the datetime components " f"DataFrame for unsupported mode, '{self._mode}'." ) return df_season
function
python
37
private ReadOnlyCollection<LoadedAssemblyDetails> GetAssembliesInternal() { lock (_explicitLoadedAssemblies) { return _explicitLoadedAssemblies .Distinct() .ToList().AsReadOnly(); } }
function
c#
38
public XmlDocument parseInputStream(InputStream is, String encoding) throws XmlBuilderException { XmlPullParser pp = null; try { pp = factory.newPullParser(); pp.setInput(is, encoding); } catch (XmlPullParserException e) { throw new XmlBuilderException("could not start parsing input stream (encoding="+encoding+")", e); } return parse(pp); }
function
java
39
public void SynchronizeSlimChain(SlimChain chain, uint256 hashStop = null, CancellationToken cancellationToken = default(CancellationToken)) { if (chain is null) throw new ArgumentNullException(nameof(chain)); AssertState(NodeState.HandShaked, cancellationToken); Logs.NodeServer.LogInformation("Building chain"); using (var listener = this.CreateListener().OfType<HeadersPayload>()) { while (true) { var currentTip = chain.TipBlock; var awaited = currentTip.Previous is null ? chain.GetLocator(currentTip.Height) : chain.GetLocator(currentTip.Height - 1); if (awaited is null) continue; SendMessageAsync(new GetHeadersPayload() { BlockLocators = awaited, HashStop = hashStop }); while (true) { bool isOurs = false; HeadersPayload headers = null; using (var headersCancel = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken)) { headersCancel.CancelAfter(PollHeaderDelay); try { headers = listener.ReceivePayload<HeadersPayload>(headersCancel.Token); } catch (OperationCanceledException) { if (cancellationToken.IsCancellationRequested) throw; break; } } if (headers.Headers.Count == 0 && PeerVersion.StartHeight == 0 && currentTip.Hash == Network.GenesisHash) return; if (headers.Headers.Count == 1 && headers.Headers[0].GetHash() == currentTip.Hash) return; foreach (var header in headers.Headers) { var h = header.GetHash(); if (h == currentTip.Hash) continue; if (header.HashPrevBlock == currentTip.Hash) { isOurs = true; currentTip = new SlimChainedBlock(h, currentTip.Hash, currentTip.Height + 1); chain.TrySetTip(currentTip.Hash, currentTip.Previous); if (currentTip.Hash == hashStop) return; } else if (chain.TrySetTip(h, header.HashPrevBlock)) { currentTip = chain.TipBlock; } else break; } if (isOurs) break; } } } }
function
c#
40
func (bm *BoundedMeanFloat64) Result() float64 { if bm.state != defaultState { log.Fatalf("Mean's noised result cannot be computed. Reason: " + bm.state.errorMessage()) } bm.state = resultReturned noisedCount := math.Max(1.0, float64(bm.Count.Result())) noisedSum := bm.NormalizedSum.Result() clamped, err := ClampFloat64(noisedSum/noisedCount+bm.midPoint, bm.lower, bm.upper) if err != nil { log.Fatalf("Couldn't clamp the result, err %v", err) } return clamped }
function
go
41
public static double[] ComputeDiscardComplementsGaussLegendreAltInvariant(Normal[] distributions, double[] evalPoints, double[] weights) { if (evalPoints.Length != weights.Length) { throw new ArgumentException("Error: Evaluation points must have same length as weights."); } int fevals = 0; distributions = NegateDistributions(distributions); double minMean = distributions[0].Mean; double maxMean = distributions[0].Mean; double maxStdev = 0; for (int i = 0; i < distributions.Length; i++) { if (distributions[i].Mean < minMean) { minMean = distributions[i].Mean; } if (distributions[i].Mean > maxMean) { maxMean = distributions[i].Mean; } if (distributions[i].StdDev > maxStdev) { maxStdev = distributions[i].StdDev; } } double intervalLowerLimit = minMean - 8 * maxStdev; double intervalUpperLimit = maxMean + 8 * maxStdev; double a = (intervalUpperLimit - intervalLowerLimit) / 2.0; double b = -1 * (2 * intervalLowerLimit / (intervalUpperLimit - intervalLowerLimit) + 1); Func<double, double> xOfz = z => (z - b) * a; double[] C = new double[evalPoints.Length]; double[] X = new double[evalPoints.Length]; for (int i = 0; i < C.Length; i++) { C[i] = weights[i]; X[i] = xOfz(evalPoints[i]); for (int j = 0; j < distributions.Length; j++) { C[i] *= distributions[j].CumulativeDistribution(X[i]); fevals++; } } double[] complementProbs = new double[distributions.Length]; for (int i = 0; i < distributions.Length; i++) { complementProbs[i] = 0; for (int j = 0; j < C.Length; j++) { double CDFij = distributions[i].CumulativeDistribution(X[j]); fevals++; if (CDFij > 0) { complementProbs[i] += distributions[i].Density(X[j]) * C[j] / CDFij; fevals++; } } complementProbs[i] *= a; Console.WriteLine($"GLAltInv[{i}]: {complementProbs[i]}"); } Console.WriteLine($"Function evaluations: {fevals}"); return complementProbs; }
function
c#
42
async def _create_rev_reg(self, rr_id: str, rr_size: int = None) -> None: LOGGER.debug('Issuer._create_rev_reg >>> rr_id: %s, rr_size: %s', rr_id, rr_size) rr_size = rr_size or 256 (cd_id, tag) = rev_reg_id2cred_def_id__tag(rr_id) LOGGER.info('Creating revocation registry (capacity %s) for rev reg id %s', rr_size, rr_id) tails_writer_handle = await blob_storage.open_writer( 'default', json.dumps({ 'base_dir': Tails.dir(self._dir_tails, rr_id), 'uri_pattern': '' })) apriori = Tails.unlinked(self._dir_tails) (rr_id, rrd_json, rre_json) = await anoncreds.issuer_create_and_store_revoc_reg( self.wallet.handle, self.did, 'CL_ACCUM', tag, cd_id, json.dumps({ 'max_cred_num': rr_size, 'issuance_type': 'ISSUANCE_ON_DEMAND' }), tails_writer_handle) delta = Tails.unlinked(self._dir_tails) - apriori if len(delta) != 1: LOGGER.debug( 'Issuer._create_rev_reg: <!< Could not create tails file for rev reg id: %s', rr_id) raise CorruptTails('Could not create tails file for rev reg id {}'.format(rr_id)) tails_hash = basename(delta.pop()) Tails.associate(self._dir_tails, rr_id, tails_hash) with REVO_CACHE.lock: rrd_req_json = await ledger.build_revoc_reg_def_request(self.did, rrd_json) await self._sign_submit(rrd_req_json) await self._get_rev_reg_def(rr_id) rre_req_json = await ledger.build_revoc_reg_entry_request(self.did, rr_id, 'CL_ACCUM', rre_json) await self._sign_submit(rre_req_json) LOGGER.debug('Issuer._create_rev_reg <<<')
function
python
43
class AxisView extends LayerView { /** * @param {Axis} axisProps * @param {import("./viewUtils").ViewContext} context * @param {string} type Data type (quantitative, ..., locus) * @param {import("./containerView").default} parent */ constructor(axisProps, type, context, parent) { // Now the presence of genomeAxis is based on field type, not scale type. // TODO: Use scale instead. However, it would make the initialization much more // complex because scales are not available before scale resolution. const genomeAxis = type == "locus"; // TODO: Compute extent /** @type {Axis | GenomeAxis} */ const fullAxisProps = { ...(genomeAxis ? defaultGenomeAxisProps : defaultAxisProps), ...getDefaultAngleAndAlign(type, axisProps), ...axisProps, }; super( genomeAxis ? createGenomeAxis(fullAxisProps) : createAxis(fullAxisProps), context, parent, `axis_${axisProps.orient}` ); this.axisProps = fullAxisProps; /** Axis should be updated before next render */ this.axisUpdateRequested = true; this._addBroadcastHandler("layout", () => { this.axisUpdateRequested = true; }); /** @type {any[]} */ this.previousScaleDomain = []; /** @type {number} TODO: Take from scal*/ this.axisLength = undefined; /** @type {TickDatum[]} */ this.ticks = []; this.tickSource = new DynamicCallbackSource(() => this.ticks); if (genomeAxis) { const channel = orient2channel(this.axisProps.orient); const genome = this.getScaleResolution(channel).getGenome(); this.findChildByName(CHROM_LAYER_NAME).getDynamicDataSource = () => new DynamicCallbackSource(() => genome.chromosomes); } } getOrient() { return this.axisProps.orient; } getSize() { /** @type {SizeDef} */ const perpendicularSize = { px: this.getPerpendicularSize() }; /** @type {SizeDef} */ const mainSize = { grow: 1 }; if (ORIENT_CHANNELS[this.axisProps.orient] == "x") { return new FlexDimensions(mainSize, perpendicularSize); } else { return new FlexDimensions(perpendicularSize, mainSize); } } getPerpendicularSize() { return getExtent(this.axisProps); } getDynamicDataSource() { return this.tickSource; } _updateAxisData() { // TODO: This could be a transform that generates ticks on the fly // Would allow for unlimited customization. const channel = orient2channel(this.axisProps.orient); const scale = this.getScaleResolution(channel).getScale(); const currentScaleDomain = scale.domain(); if ( shallowArrayEquals(currentScaleDomain, this.previousScaleDomain) && !this.axisUpdateRequested ) { // TODO: Instead of scale comparison, register an observer to Resolution return; } this.previousScaleDomain = currentScaleDomain; const oldTicks = this.ticks; const newTicks = generateTicks( this.axisProps, scale, this.axisLength, oldTicks ); if (newTicks !== oldTicks) { this.ticks = newTicks; this.tickSource.loadSynchronously(); } this.axisUpdateRequested = false; } onBeforeRender() { super.onBeforeRender(); this._updateAxisData(); } /** * @param {import("./renderingContext/viewRenderingContext").default} context * @param {import("../utils/layout/rectangle").default} coords * @param {import("./view").RenderingOptions} [options] */ render(context, coords, options = {}) { this.axisLength = coords[CHANNEL_DIMENSIONS[orient2channel(this.getOrient())]]; super.render(context, coords, options); } }
class
javascript
44
int columnar_get_col_width(int typid, int width, bool aligned) { if (COL_IS_ENCODE(typid)) { if (aligned) { return alloc_trunk_size(width); } else { return width; } } else return 0; }
function
c++
45
public String serialize(XMLStreamReader reader) { if (reader == null) { throw new IllegalArgumentException("XMLStreamReader cannot be null"); } try { if (transformer == null) { transformer = transformerFactory.newTransformer(); } StAXSource stAXSource = new StAXSource(reader); StringWriter stringWriter = new StringWriter(); StreamResult streamResult = new StreamResult(stringWriter); transformer.transform(stAXSource, streamResult); stringWriter.flush(); transformer.reset(); return stringWriter.toString(); } catch (TransformerException e) { throw new RuntimeException("Could not serialize the document", e); } }
function
java
46
def plot_coverage(self,spheres): box = self.domain_dimensions if box.dim_z != 0: for sph in spheres: dent = 2*sph.r*(0.4/1.18) circle = plt.Circle((sph.x, sph.z), dent/2 , edgecolor = 'black', facecolor = 'red', alpha = 0.08) plt.gca().add_patch(circle) plt.gca().set_xlim((-box.dim_x/2, box.dim_x/2)) plt.gca().set_ylim((-box.dim_z/2, box.dim_z/2)) plt.gca().set_aspect('equal','box') plt.gca().grid() plt.title("Coverage") plt.show() elif box.dim_z == 0: print('Coverage plot is only available in 3D spheres') return
function
python
47
def ExecuteQuery(self, query_name: Text) -> Tuple[float, Dict[str, str]]: query_command = ( 'python script_driver.py --script={} --server={} --database={} ' '--user={} --password={} --query_timeout={}').format( query_name, self.server_name, self.database, self.user, self.password, FLAGS.query_timeout) stdout, _ = self.client_vm.RemoteCommand(query_command) performance = json.loads(stdout) details = copy.copy(self.GetMetadata()) details['job_id'] = performance[query_name]['job_id'] return float(performance[query_name]['execution_time']), details
function
python
48
public List<ColumnType> types() { List<ColumnType> columnTypes = new ArrayList<>(columnCount()); for (int i = 0; i < columnCount(); i++) { columnTypes.add(columns().get(i).type()); } return columnTypes; }
function
java
49
private String longTo3String(long l) { String[] returnThis = new String[3]; String longS = Long.toString(l); int div = longS.length()/3; returnThis[0]=longS.substring(0, div); returnThis[1]=longS.substring(div, (2*div)); returnThis[2]=longS.substring(2*div); String returnme = returnThis[0]+" " + returnThis[1]+" " + returnThis[2]; return returnme; }
function
java
50
def EnumToRegex(enum_type: Optional[Type[E]] = None, omit: Optional[List[E]] = None, exactly: Optional[E] = None) -> syaml.Regex: if exactly: if enum_type or omit: raise ValueError('Cannot specify an exact value with other constraints') return _OrRegex([exactly.value]) if omit is None: omit = [] return _OrRegex([v.value for v in list(enum_type) if v not in omit])
function
python
51
def shorten_url(url: str) -> str:
    """The performance of this algorithm in the worst case is terrible, but per
    the current parameters there are 7**62 possible shortkeys, so it is unlikely
    that step 3 of the algorithm will ever be reached.
    InvalidURLError is propagated to caller.
    """
    url = _validate_url(url)
    hashstr = hashlib.sha1(url.encode()).hexdigest()
    long_keystr = _key_from_hex(hashstr)
    for i in range(len(long_keystr) - SHORTKEY_LENGTH + 1):
        candidate_key = long_keystr[i:i+SHORTKEY_LENGTH]
        if _try_insert(candidate_key, url):
            return candidate_key
    # none of the key windows were available, so increment the current
    # candidate key until we find an available one
    initial_key = candidate_key
    cur = _next_key(initial_key)
    while cur != initial_key:
        if _try_insert(cur, url):
            return cur
        cur = _next_key(cur)
        if not cur:
            # wrap around
            cur = '0' * SHORTKEY_LENGTH
    # there are no more available shortkeys
    raise OutOfShortKeysError
function
python
52
static void gtaskqueue_drain_tq_active(struct gtaskqueue *queue) { struct gtaskqueue_busy tb_marker, *tb_first; if (TAILQ_EMPTY(&queue->tq_active)) return; queue->tq_callouts++; tb_marker.tb_running = TB_DRAIN_WAITER; TAILQ_INSERT_TAIL(&queue->tq_active, &tb_marker, tb_link); while (TAILQ_FIRST(&queue->tq_active) != &tb_marker) TQ_SLEEP(queue, &tb_marker, &queue->tq_mutex, PWAIT, "-", 0); TAILQ_REMOVE(&queue->tq_active, &tb_marker, tb_link); tb_first = TAILQ_FIRST(&queue->tq_active); if (tb_first != NULL && tb_first->tb_running == TB_DRAIN_WAITER) wakeup(tb_first); queue->tq_callouts--; if ((queue->tq_flags & TQ_FLAGS_ACTIVE) == 0) wakeup_one(queue->tq_threads); }
function
c
53
private object GetStoryParameter(object p, bool viewerNeeded) { object parameter = null; StoryViewer storyViewer = p as StoryViewer; if (storyViewer != null) { if (viewerNeeded) { parameter = storyViewer; } else { parameter = storyViewer.Story; } } else { parameter = GetStoryFromCommandSource(p); } return parameter; }
function
c#
54
fn parse_set_args(args: Vec<String>) -> Subcommand { let mut simple = HashMap::new(); let mut json = None; let mut iter = args.into_iter(); while let Some(arg) = iter.next() { match arg.as_ref() { "-j" | "--json" if json.is_some() => { usage_msg( "Can't specify the --json argument multiple times. You can set as many \ settings as needed within the JSON object.", ); } "-j" | "--json" if json.is_none() => { let raw_json = iter .next() .unwrap_or_else(|| usage_msg("Did not give argument to -j | --json")); let input_val: serde_json::Value = serde_json::from_str(&raw_json).unwrap_or_else(|e| { usage_msg(&format!("Couldn't parse given JSON input: {}", e)) }); let mut input_map = match input_val { serde_json::Value::Object(map) => map, _ => usage_msg("JSON input must be an object (map)"), }; // To be nice, if the user specified a "settings" layer around their data, we // remove it. (This should only happen if there's a single key, since we only // allow setting settings; fail otherwise. If we allow setting other types in the // future, we'll have to do more map manipulation here to save the other values.) if let Some(val) = input_map.remove("settings") { match val { serde_json::Value::Object(map) => input_map.extend(map), _ => usage_msg("JSON 'settings' value must be an object (map)"), }; } json = Some(input_map.into()); } x if x.contains('=') => { let mut split = x.splitn(2, '='); let raw_key = split.next().unwrap(); let value = split.next().unwrap(); let mut key = Key::new(KeyType::Data, raw_key).unwrap_or_else(|_| { usage_msg(&format!("Given key '{}' is not a valid format", raw_key)) }); // Add "settings" prefix if the user didn't give a known prefix, to ease usage let key_prefix = &key.segments()[0]; if key_prefix != "settings" { let mut segments = key.segments().clone(); segments.insert(0, "settings".to_string()); key = Key::from_segments(KeyType::Data, &segments) .expect("Adding prefix to key resulted in invalid key?!"); } simple.insert(key, value.to_string()); } x => usage_msg(&format!("Unknown argument '{}'", x)), } } if json.is_some() && !simple.is_empty() { usage_msg("Cannot specify key=value pairs and --json settings with 'set'"); } else if let Some(json) = json { Subcommand::Set(SetArgs::Json(json)) } else if !simple.is_empty() { Subcommand::Set(SetArgs::Simple(simple)) } else { usage_msg("Must specify key=value settings or --json settings with 'set'"); } }
function
rust
55
@XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "spottingPattern", "orientation" }) public static class Pattern { @XmlElement(required = true) protected OntologyEntryType spottingPattern; @XmlElement(required = true) protected MsRun.Spotting.Plate.Pattern.Orientation orientation; /** * Gets the value of the spottingPattern property. * * @return possible object is {@link OntologyEntryType } */ public OntologyEntryType getSpottingPattern() { return spottingPattern; } /** * Sets the value of the spottingPattern property. * * @param value allowed object is {@link OntologyEntryType } */ public void setSpottingPattern(OntologyEntryType value) { this.spottingPattern = value; } /** * Gets the value of the orientation property. * * @return possible object is {@link MsRun.Spotting.Plate.Pattern.Orientation } */ public MsRun.Spotting.Plate.Pattern.Orientation getOrientation() { return orientation; } /** * Sets the value of the orientation property. * * @param value allowed object is {@link MsRun.Spotting.Plate.Pattern.Orientation } */ public void setOrientation(MsRun.Spotting.Plate.Pattern.Orientation value) { this.orientation = value; } /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this * class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;attribute name="firstSpotID" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="secondSpotID" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "") public static class Orientation { @XmlAttribute(name = "firstSpotID", required = true) protected String firstSpotID; @XmlAttribute(name = "secondSpotID", required = true) protected String secondSpotID; /** * Gets the value of the firstSpotID property. * * @return possible object is {@link String } */ public String getFirstSpotID() { return firstSpotID; } /** * Sets the value of the firstSpotID property. * * @param value allowed object is {@link String } */ public void setFirstSpotID(String value) { this.firstSpotID = value; } /** * Gets the value of the secondSpotID property. * * @return possible object is {@link String } */ public String getSecondSpotID() { return secondSpotID; } /** * Sets the value of the secondSpotID property. * * @param value allowed object is {@link String } */ public void setSecondSpotID(String value) { this.secondSpotID = value; } } }
class
java
56
public static Constructor<?> lookup(final Class<?> clazz, final Class<?>[] argTypes) { final Constructor<?> cachedConstructor = get(clazz, argTypes); if (cachedConstructor != null) { return cachedConstructor; } else { final Constructor<?> uncachedConstructor = directConstructorLookup(clazz, argTypes); if (uncachedConstructor != null) { put(clazz, argTypes, uncachedConstructor); } return uncachedConstructor; } }
function
java
57
private void prepareForNewBatchQuery() { if (completed) { LOG.debug().$("prepare for new query").$(); isEmptyQuery = false; bindVariableService.clear(); currentCursor = Misc.free(currentCursor); typesAndInsert = null; clearCursorAndFactory(); rowCount = 0; queryTag = TAG_OK; queryText = null; wrapper = null; syncActions.clear(); sendParameterDescription = false; } }
function
java
58
parseOptions(tokens) { const options = emptyTextureOptions(); let option; let values; const optionsToValues = {}; tokens.reverse(); while (tokens.length) { const token = tokens.pop(); if (token.startsWith("-")) { option = token.substr(1); optionsToValues[option] = []; } else if (option) { optionsToValues[option].push(token); } } for (option in optionsToValues) { if (!optionsToValues.hasOwnProperty(option)) { continue; } values = optionsToValues[option]; const optionMethod = this[`parse_${option}`]; if (optionMethod) { optionMethod.bind(this)(values, options); } } return options; }
function
javascript
59
bool SpdyAltSvcWireFormat::ParseProbability(StringPiece::const_iterator c, StringPiece::const_iterator end, double* probability) { if (c == end) { return false; } if (end - c == 1 && *c == '.') { return false; } if (*c == '1') { *probability = 1.0; ++c; } else { *probability = 0.0; if (*c == '0') { ++c; } } if (c == end) { return true; } if (*c != '.') { return false; } ++c; double place_value = 0.1; for (; c != end && isdigit(*c); ++c) { *probability += place_value * (*c - '0'); place_value *= 0.1; } return (c == end && *probability <= 1.0); }
function
c++
60
def screenSelect(self, direction): lstScreens = self.root.screen_names screenIndex = lstScreens.index(self.root.current) if direction == 'previous': screenIndex -= 1 self.root.transition = SlideTransition(direction='right') if screenIndex < 0: self.root.transition = SlideTransition(direction='left') self.root.current = lstScreens[-1] return else: screenIndex += 1 self.root.transition = SlideTransition(direction='left') if screenIndex >= len(lstScreens): self.root.transition = SlideTransition(direction='right') self.root.current = lstScreens[0] return self.root.current = lstScreens[screenIndex]
function
python
61
public static int register(Class<? extends JHeader> c, List<HeaderDefinitionError> errors) { AnnotatedHeader annotatedHeader = inspect(c, errors); if (errors.isEmpty() == false) { return -1; } Entry e = mapByClassName.get(c.getCanonicalName()); if (e == null) { e = createNewEntry(c); } int id = e.id; e.annotatedHeader = annotatedHeader; scanners[id] = new JHeaderScanner(c); registerAnnotatedSubHeaders(annotatedHeader.getHeaders()); JBinding[] bindings = AnnotatedBinding.inspectJHeaderClass(c, errors); if (errors.isEmpty() == false) { return -1; } addBindings(bindings); for (PcapDLT d : annotatedHeader.getDlt()) { registerDLT(d, id); } return id; }
function
java
62
private Watchlist decode(String line) { String[] lineSplit = line.split(WATCHLIST_LINE_DELIMITER_FOR_DECODE, 2); if (!isValidWatchlistString(lineSplit)) { return null; } String watchlistName = lineSplit[0].trim(); if (watchlistName.isBlank()) { return null; } String animeListString = lineSplit[1].trim(); String animeListStringContent = animeListString.substring(1, animeListString.length() - 1); ArrayList<Integer> animeList = new ArrayList<>(); if (animeListStringContent.isBlank()) { return new Watchlist(watchlistName, animeList); } String[] animes = animeListStringContent.split(DELIMITER_FOR_ENCODED_ANIME_LIST); if (animes.length == 0) { return null; } for (String animeIndex : animes) { String trimmedIndex = animeIndex.trim(); if (!isPositiveInteger(trimmedIndex)) { return null; } int parsedAnimeIndex = parseStringToInteger(trimmedIndex); boolean isValidAnimeIndex = (parsedAnimeIndex <= MAX_ANIME_INDEX) && !(animeList.contains(parsedAnimeIndex)); if (!isValidAnimeIndex) { return null; } animeList.add(parsedAnimeIndex); } return new Watchlist(watchlistName, animeList); }
function
java
63
function generateListOfAllTests(callback) { mkdirp(TEMPORARY_DIRECTORY, function(err) { if (err) { logError(err); return; } glob(TEST_DIRECTORY + '/**/*_test.html', {}, function(er, file_names) { fs.writeFile( TEMPORARY_DIRECTORY + '/all_tests.js', 'var _allTests = ' + JSON.stringify(file_names) + ';', callback); }); }); }
function
javascript
64
def call( self, inputs: tf.Tensor, states: Optional[States] = None, output_states: bool = True, ) -> Union[tf.Tensor, Tuple[tf.Tensor, States]]: states = dict(states) if states is not None else {} num_frames = tf.shape(inputs)[1] frame_count = tf.cast(states.get(self._frame_count_name, [0]), tf.int32) states[self._frame_count_name] = frame_count + num_frames if self._cache_encoding: pos_encoding = self._pos_encoding else: pos_encoding = self._get_pos_encoding( tf.shape(inputs), frame_count=frame_count) pos_encoding = tf.cast(pos_encoding, inputs.dtype) pos_encoding = self._rezero(pos_encoding) outputs = inputs + pos_encoding return (outputs, states) if output_states else outputs
function
python
65
def save(self, flush=True, **kwargs): self.new_objects = [] self.changed_objects = [] self.deleted_objects = [] saved_instances = [] for form in self.extra_forms: if not form.has_changed(): continue if self.can_delete and self._should_delete_form(form): continue obj = form.save(flush=flush) self.new_objects.append(obj) saved_instances.append(obj) for form in self.initial_forms: if form in self.deleted_forms: self.deleted_objects.append(form.instance) self.delete_existing(form.instance) continue elif form.has_changed(): self.changed_objects.append(form.instance) obj = form.save(flush=flush) saved_instances.append(obj) return saved_instances
function
python
66
public class BFileReader { public final static String CVSID = "@(#) $Id: BFileReader.java 744 2011-07-26 06:29:20Z gfis $"; /** log4j logger (category) */ private Logger log; /* local copy of the sequence read from the b-file */ private ArrayList<BigInteger> sequence; /** No-args Constructor */ public BFileReader() { sequence = new ArrayList<BigInteger>(1024); sequence.add(BigInteger.ZERO); } // no-args Constructor /** Reads a text file, appends the (partial) sequence, * and returns the sequence stored so far. * Lines starting with "#" (possibly preceeded by whitespace) * are treated as comments and are ignored, as are empty lines. * If the line contains 2 numbers, the 1st is the index (which is ignored) * and the 2nd is the number in the sequence. * If the line contains only 1 number, that number is appended to the sequence. * @param fileName name of the file to be parsed, * or read from STDIN if <em>fileName</em> is <em>null</em>, empty or "-" * @return array list with the sequence */ public ArrayList<BigInteger> read(String fileName) { String line = null; // current line from text file String[] numbers = null; try { BufferedReader lineReader = new BufferedReader ( (fileName == null || fileName.length() <= 0 || fileName.equals("-")) ? new InputStreamReader(System.in) : new FileReader(fileName) ); while ((line = lineReader.readLine()) != null) { // read and process lines if (! line.matches("\\s*#.*") && ! line.matches("\\s*")) { // no comment line, no empty line numbers = line.trim().split("\\s+"); int last = numbers.length - 1; /* try { int index = Integer.parseInt(numbers[0]); } catch (Exception exc) { } */ sequence.add(new BigInteger(numbers[last])); } else { // ignore comment line } } // while ! eof lineReader.close(); } catch (Exception exc) { log.error(exc.getMessage(), exc); } // try return sequence; } // read /** Test method, reads a file and prints the concatenated lines * @param args command line arguments: filename or empty = STDIN */ public static void main(String[] args) { String fileName = null; if (args.length >= 2) { // ignore [0] = "-f" fileName = args[1]; } ArrayList<BigInteger> seq = (new BFileReader()).read(fileName); int index = 0; while (index < seq.size()) { System.out.println(index + " " + seq.get(index).toString()); index ++; } // while index } // main }
class
java
67
def viz(mode, annot_subset, banlist): print('start') img_dir = '../InterHand2.6M_5fps_batch0/images' annot_path = '../InterHand2.6M_5fps_batch0/annotations' joint_num = 21 root_joint_idx = {'right': 20, 'left': 41} joint_type = {'right': np.arange(0, joint_num), 'left': np.arange(joint_num, joint_num * 2)} print("Load annotation from " + os.path.join(annot_path, annot_subset)) with open(os.path.join(annot_path, annot_subset, 'InterHand2.6M_' + mode + '_data.json')) as f: data = json.load(f) with open(os.path.join(annot_path, annot_subset, 'InterHand2.6M_' + mode + '_camera.json')) as f: cameras = json.load(f) with open(os.path.join(annot_path, annot_subset, 'InterHand2.6M_' + mode + '_joint_3d.json')) as f: joints = json.load(f) imgs = data['images'] anns = data['annotations'] for idx, img in enumerate(imgs): if img['file_name'].split('/')[-2] not in banlist: ann = anns[idx] cam = img['camera'] capture_id = img['capture'] fram_id = img['frame_idx'] bbox = ann['bbox'] bbox = [bbox[0], bbox[0] + bbox[2], bbox[1], bbox[1] + bbox[3]] joints_world = np.array(joints[str(capture_id)][str(fram_id)]['world_coord']) campos, camrot = np.array(cameras[str(capture_id)]['campos'][str(cam)], dtype=np.float32), np.array( cameras[str(capture_id)]['camrot'][str(cam)], dtype=np.float32) focal, princpt = np.array(cameras[str(capture_id)]['focal'][str(cam)], dtype=np.float32), np.array( cameras[str(capture_id)]['princpt'][str(cam)], dtype=np.float32) joints_cam = world2cam(joints_world.transpose((1, 0)), camrot, campos.reshape(3, 1)).transpose((1, 0)) joints_img = cam2pixel(joints_cam, focal, princpt)[:, :2] print(ann) print(img) joint_valid = np.array(ann['joint_valid'], dtype=np.float32).reshape(joint_num * 2) if ann['hand_type'] != 'interacting': hand = joint_type[ann['hand_type']] kp_2d = joints_img[hand] kp_cam = joints_cam[hand] angle = hand_angle(kp_cam) img_path = os.path.join(img_dir, mode, img['file_name']) image = cv2.imread(img_path) print(np.sum(image)) draw_hand(image, kp_2d, angle, bbox) else: joints_img_left = joints_img[joint_type['left']] joints_img_right = joints_img[joint_type['right']] bbox_left = [min(joints_img_left[:, 0]) - 10, max(joints_img_left[:, 0]) + 10, min(joints_img_left[:, 1]) - 10, max(joints_img_left[:, 1]) + 10] bbox_right = [min(joints_img_right[:, 0]) - 10, max(joints_img_right[:, 0]) + 10, min(joints_img_right[:, 1]) - 10, max(joints_img_right[:, 1]) + 10] iou_perc = iou(bbox_left, bbox_right) img_path = os.path.join(img_dir, mode, img['file_name']) image = cv2.imread(img_path) draw_hand(image, joints_img_right, iou_perc, bbox_right) image = cv2.imread(img_path) draw_hand(image, joints_img_left, iou_perc, bbox_left)
function
python
68
public class EjbJar30DataLoader extends EjbJarDataLoader{ private static final long serialVersionUID = 1L; private static final String REQUIRED_MIME_PREFIX_3 = "text/x-dd-ejbjar3.0"; // NOI18N private static final String REQUIRED_MIME_PREFIX_3_1 = "text/x-dd-ejbjar3.1"; // NOI18N private static final String REQUIRED_MIME_PREFIX_3_2 = "text/x-dd-ejbjar3.2"; // NOI18N public EjbJar30DataLoader () { super ("org.netbeans.modules.j2ee.ddloaders.multiview.EjbJarMultiViewDataObject"); // NOI18N } protected String actionsContext() { return "Loaders/text/x-dd-ejbjar3.0/Actions/"; // NOI18N } protected String[] getSupportedMimeTypes(){ return new String[]{REQUIRED_MIME_PREFIX_3, REQUIRED_MIME_PREFIX_3_1, REQUIRED_MIME_PREFIX_3_2}; } }
class
java
69
@Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_schedule); setupEventList(); BottomNavigationView navigation = (BottomNavigationView) findViewById(R.id.navigation); navigation.setLabelVisibilityMode(LabelVisibilityMode.LABEL_VISIBILITY_UNLABELED); navigation.setSelectedItemId(R.id.action_schedule); navigation.setOnNavigationItemSelectedListener( new BottomNavigationView.OnNavigationItemSelectedListener() { @Override public boolean onNavigationItemSelected(MenuItem item) { switch (item.getItemId()) { case R.id.action_companies: Intent intent = new Intent(ScheduleActivity.this, MainActivity.class); startActivity(intent); overridePendingTransition(android.R.anim.fade_in, android.R.anim.fade_out); return true; case R.id.action_map: intent = new Intent(ScheduleActivity.this, MapActivity.class); startActivity(intent); overridePendingTransition(android.R.anim.fade_in, android.R.anim.fade_out); return true; case R.id.action_schedule: return true; case R.id.action_settings: intent = new Intent(ScheduleActivity.this, MenuActivity.class); startActivity(intent); overridePendingTransition(android.R.anim.fade_in, android.R.anim.fade_out); return true; } return false; } }); }
function
java
70
long ctslab (cplx *contrast, cplx *k, real *r, real *lr, real *nr, cplx kbg, long nx, long ny, real delta) { long i, npx; real rval; npx = nx * ny; #pragma omp parallel for default(shared) private(rval,i) for (i = 0; i < npx; ++i) { contrast[i] = k[i] / kbg; contrast[i] = contrast[i] * contrast[i] - 1; rval = lapden (r, lr, nr, delta, i, nx, ny); contrast[i] -= rval * r[i] / (kbg * kbg); } return npx; }
function
c
71
public void createGrid(int numRows, int numCols) { createGrid(new String[numRows][numCols]); for(int i = 0; i<getRows();i++) { for(int j = 0; j<getCols();j++) { double choice = Math.random(); if (choice<=percentEmpty) { setCell(i,j,"empty"); } else{ setCell(i,j,"blocked"); } } } setCell(25,25,"full"); }
function
java
72
constructMovieArray (revisedMoviesAvailable, revisedPageOptionNumber) { const resultArray = [] for (let i = 0; i < revisedMoviesAvailable.length; i++) { const newObject = { movie: revisedMoviesAvailable[i], option: parseFloat(revisedPageOptionNumber[i]) } resultArray.push(newObject) } return resultArray }
function
javascript
73
void ChromeMiniInstaller::UnInstall() { printf("Verifying if Chrome is installed...\n"); BrowserDistribution* dist = BrowserDistribution::GetDistribution(); if (!CheckRegistryKey(dist->GetVersionKey())) { printf("Chrome is not installed.\n"); return; } printf("\nClosing Chrome processes, if any...\n"); CloseProcesses(installer_util::kChromeExe); std::wstring uninstall_path = GetUninstallPath(); ASSERT_TRUE(file_util::PathExists(uninstall_path)); printf("\nUninstalling Chrome...\n"); process_util::LaunchApp(L"\"" + uninstall_path + L"\"" + L" -uninstall", false, false, NULL); printf("\nLaunched setup.exe -uninstall....\n"); ASSERT_TRUE(CloseWindow( mini_installer_constants::kConfirmDialog, WM_COMMAND)); WaitUntilProcessStopsRunning( mini_installer_constants::kChromeSetupExecutable); ASSERT_FALSE(CheckRegistryKey(dist->GetVersionKey())); DeleteAppFolder(); FindChromeShortcut(); CloseProcesses(mini_installer_constants::kIEExecutable); ASSERT_EQ(0, process_util::GetProcessCount( mini_installer_constants::kIEExecutable, NULL)); }
function
c++
74
public class BookingEndorsementFilter implements AcceptIfResolver {

    @Override
    public boolean accept(Element element, Object parentObject) {
        Elements endorsementName = element.select("div:nth-child(1) > div:nth-child(1) > div:nth-child(2) > p:nth-child(1)");
        return !endorsementName.get(0).text().toLowerCase().contains("city");
    }

    @Override
    public void init(Field field, Object parentObject, Selector selector) throws ObjectCreationException {
    }
}
class
java
75
func (s ServiceCommonGeneratorV1) validate() error {
    if len(s.Name) == 0 {
        return fmt.Errorf("name must be specified")
    }
    if len(s.Type) == 0 {
        return fmt.Errorf("type must be specified")
    }
    if s.ClusterIP == v1.ClusterIPNone && s.Type != v1.ServiceTypeClusterIP {
        return fmt.Errorf("ClusterIP=None can only be used with ClusterIP service type")
    }
    if s.ClusterIP != v1.ClusterIPNone && len(s.TCP) == 0 && s.Type != v1.ServiceTypeExternalName {
        return fmt.Errorf("at least one tcp port specifier must be provided")
    }
    if s.Type == v1.ServiceTypeExternalName {
        if errs := validation.IsDNS1123Subdomain(s.ExternalName); len(errs) != 0 {
            return fmt.Errorf("invalid service external name %s", s.ExternalName)
        }
    }
    return nil
}
function
go
76
private Observable<Event> fileSelected(Path filePath) {
    return Observable.just(
            Event.of(Event.MARKUP_PROPERTIES, Event.MARKUP_CONTROLLER, FILE_SELECTED, filePath),
            Event.of(Event.MARKUP_PROPERTIES, Event.PROPERTY_SELECTOR, CLEAR_SELECTION, null),
            Event.of(Event.MARKUP_PROPERTIES, Event.PROPERTY_ATTRIBUTES, CLEAR_ATTRIBUTES, null)
    );
}
function
java
77
private static boolean lockFile(File file) throws IOException, FileStateException {
    boolean bReturn = true;
    RandomAccessFile raf = null;
    if (!file.exists()) {
        throw new FileNotFoundException("The file " + file.getAbsolutePath() + " does not exist anymore.");
    }
    if (!file.canRead()) {
        throw new IOException("File is not readable: " + file);
    }
    if (!file.canWrite()) {
        throw new IOException("File is not writable: " + file);
    }
    try {
        raf = new RandomAccessFile(file, "rw");
        FileChannel channel = raf.getChannel();
        FileLock lock = channel.tryLock();
        try {
            if (lock != null) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Lock on file " + file + " obtained.");
                }
            } else {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Could not obtain lock on " + file);
                }
                bReturn = false;
            }
        } finally {
            if (lock != null) {
                lock.release();
            }
        }
    } catch (FileNotFoundException e) {
        LOG.log(Severity.ERROR, e, LogMessages.FILE_COULD_NOT_BE_LOCKED, file);
        throw new FileStateException(e, FileStateException.EType.ABORT, LogMessages.FILE_COULD_NOT_BE_LOCKED, file);
    } catch (Exception e) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Lock could not be obtained. It is probably being locked by another process.", e);
        }
        bReturn = false;
    } finally {
        if (raf != null) {
            try {
                raf.close();
            } catch (IOException e) {
            }
        }
    }
    return bReturn;
}
function
java
78
public static String findFreeCamelContextName(BundleContext context, String prefix, String key,
                                              AtomicInteger counter, boolean checkFirst) {
    String candidate = null;
    boolean clash = false;
    do {
        try {
            clash = false;
            if (candidate == null && checkFirst) {
                candidate = prefix;
            } else {
                candidate = prefix + "-" + getNextCounter(counter);
            }
            LOG.trace("Checking OSGi Service Registry for existence of existing CamelContext with name: {}", candidate);
            ServiceReference<?>[] refs = context.getServiceReferences(CamelContext.class.getName(), "(" + key + "=" + candidate + ")");
            if (refs != null && refs.length > 0) {
                for (ServiceReference<?> ref : refs) {
                    Object id = ref.getProperty(key);
                    if (id != null && candidate.equals(id)) {
                        clash = true;
                        break;
                    }
                }
            }
        } catch (InvalidSyntaxException e) {
            LOG.debug("Error finding free Camel name in OSGi Service Registry due " + e.getMessage() + ". This exception is ignored.", e);
            break;
        }
    } while (clash);
    LOG.debug("Generated free name for bundle id: {}, clash: {} -> {}", context.getBundle().getBundleId(), clash, candidate);
    return candidate;
}
function
java
79
public class ArrayIndexedPriorityQueue<T> implements ReactionManager<T> {

    private static final long serialVersionUID = 8064379974084348391L;

    private final TObjectIntMap<Reaction<T>> indexes = new ObjectIntHashMap<>();
    private transient FastReadWriteLock rwLock = new FastReadWriteLock();
    private final List<Time> times = new ArrayList<>();
    private final List<Reaction<T>> tree = new ArrayList<>();

    private static int getParent(final int i) {
        if (i == 0) {
            return -1;
        }
        return (i - 1) / 2;
    }

    @Override
    public void addReaction(final Reaction<T> r) {
        rwLock.write();
        tree.add(r);
        times.add(r.getTau());
        final int index = tree.size() - 1;
        indexes.put(r, index);
        updateEffectively(r, index);
        rwLock.release();
    }

    private void down(final Reaction<T> r, final int i) {
        int index = i;
        final Time newTime = r.getTau();
        do {
            int minIndex = 2 * index + 1;
            if (minIndex > tree.size() - 1) {
                return;
            }
            Time minTime = times.get(minIndex);
            Reaction<T> min = tree.get(minIndex);
            final int right = minIndex + 1;
            if (right < tree.size()) {
                final Time rr = times.get(right);
                if (rr.compareTo(minTime) < 0) {
                    min = tree.get(right);
                    minIndex = right;
                    minTime = rr;
                }
            }
            if (newTime.compareTo(minTime) > 0) {
                swap(index, r, minIndex, min);
                index = minIndex;
            } else {
                return;
            }
        } while (true);
    }

    @Override
    public Reaction<T> getNext() {
        rwLock.read();
        Reaction<T> res = null;
        if (!tree.isEmpty()) {
            res = tree.get(0);
        }
        rwLock.release();
        return res;
    }

    @Override
    public void removeReaction(final Reaction<T> r) {
        rwLock.write();
        final int index = indexes.get(r);
        final int last = tree.size() - 1;
        if (index == last) {
            tree.remove(index);
            indexes.remove(r);
            times.remove(index);
        } else {
            final Reaction<T> swapped = tree.get(last);
            indexes.put(swapped, index);
            tree.set(index, swapped);
            times.set(index, swapped.getTau());
            tree.remove(last);
            times.remove(last);
            indexes.remove(r);
            updateEffectively(swapped, index);
        }
        rwLock.release();
    }

    private void swap(final int i1, final Reaction<T> r1, final int i2, final Reaction<T> r2) {
        indexes.put(r1, i2);
        indexes.put(r2, i1);
        tree.set(i1, r2);
        tree.set(i2, r1);
        final Time t = times.get(i1);
        times.set(i1, times.get(i2));
        times.set(i2, t);
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();
        int pow = 0;
        int exp = 0;
        rwLock.read();
        for (int i = 0; i < tree.size(); i++) {
            final int tabulars = (int) (Math.floor(Math.log(tree.size()) / Math.log(2))
                    - Math.floor(Math.log(i + 1) / Math.log(2))) + 1;
            for (int t = 0; t < tabulars; t++) {
                sb.append('\t');
            }
            sb.append(times.get(i));
            if (i == pow) {
                exp++;
                pow = pow + (int) Math.pow(2, exp);
                sb.append('\n');
            }
        }
        rwLock.release();
        return sb.toString();
    }

    private boolean up(final Reaction<T> r, final int i) {
        int index = i;
        int parentIndex = getParent(index);
        final Time newTime = r.getTau();
        if (parentIndex == -1) {
            return false;
        } else {
            Reaction<T> parent = tree.get(parentIndex);
            if (newTime.compareTo(times.get(parentIndex)) >= 0) {
                return false;
            } else {
                do {
                    swap(index, r, parentIndex, parent);
                    index = parentIndex;
                    parentIndex = getParent(index);
                    if (parentIndex == -1) {
                        return true;
                    }
                    parent = tree.get(parentIndex);
                } while (newTime.compareTo(times.get(parentIndex)) < 0);
                return true;
            }
        }
    }

    private void updateEffectively(final Reaction<T> r, final int index) {
        if (!up(r, index)) {
            down(r, index);
        }
    }

    @Override
    public void updateReaction(final Reaction<T> r) {
        rwLock.write();
        final int index = indexes.get(r);
        if (index != indexes.getNoEntryValue()) {
            times.set(index, r.getTau());
            updateEffectively(r, index);
        }
        rwLock.release();
    }

    private void readObject(final ObjectInputStream s) throws ClassNotFoundException, IOException {
        s.defaultReadObject();
        rwLock = new FastReadWriteLock();
    }
}
class
java
80
public GameBoard doMove(Move m, int color) throws InvalidMoveException {
    if (!isLegal(m, color)) {
        throw new InvalidMoveException();
    }
    GameBoard temp = new GameBoard();
    for (int j = 0; j <= 7; j++) {
        for (int i = 0; i <= 7; i++) {
            temp.board[j][i] = board[j][i];
        }
    }
    temp.bChipCount = this.bChipCount;
    temp.wChipCount = this.wChipCount;
    if (m.moveKind == Move.STEP) {
        temp.board[m.x2][m.y2] = EMPTY;
    }
    if (m.moveKind == Move.ADD) {
        if (color == BLACK) {
            temp.bChipCount++;
        } else if (color == WHITE) {
            temp.wChipCount++;
        }
    }
    temp.board[m.x1][m.y1] = color;
    return temp;
}
function
java
81
func (broker *TaskBroker) PublishTasks(queueName string, settings []models.TaskSetting) (err error) {
    logrus.Info(fmt.Sprintf("To schedule %d tests.", len(settings)))
    _, ch, err := broker.QueueDeclare(queueName)
    if err != nil {
        return fmt.Errorf("fail to decalre queue: %s", err.Error())
    }
    logrus.Info(fmt.Sprintf("Declared queue %s. Begin publishing tasks ...", queueName))
    for _, setting := range settings {
        body, err := json.Marshal(setting)
        if err != nil {
            logrus.Warnf("Fail to marshal task %s setting in JSON. Error %s. The task is skipped.", setting, err.Error())
            continue
        }
        err = ch.Publish(
            "",
            queueName,
            false,
            false,
            amqp.Publishing{
                DeliveryMode: amqp.Persistent,
                ContentType:  "application/json",
                Body:         body,
            })
        if err != nil {
            logrus.Warnf("Fail to publish task %s. Error %s. The task is skipped.", setting, err.Error())
        }
    }
    logrus.Info("Finish publish tasks")
    return nil
}
function
go
82
def add(self, spec, method, level=0, authn_authority="", reference=None):
    if spec.authn_context_class_ref:
        key = spec.authn_context_class_ref.text
        _info = {
            "class_ref": key,
            "method": method,
            "level": level,
            "authn_auth": authn_authority
        }
    elif spec.authn_context_decl:
        key = spec.authn_context_decl.c_namespace
        _info = {
            "method": method,
            "decl": spec.authn_context_decl,
            "level": level,
            "authn_auth": authn_authority
        }
    else:
        raise NotImplementedError()

    self.next += 1
    _ref = reference
    if _ref is None:
        _ref = str(self.next)

    assert _ref not in self.db["info"]

    self.db["info"][_ref] = _info
    try:
        self.db["key"][key].append(_ref)
    except KeyError:
        self.db["key"][key] = [_ref]
function
python
83
function mockSDKApi(apiPath, value) {
  var __toString = Object.prototype.toString;
  module(function ($provide) {
    var pathAssign = function (obj, pathStr, value) {
      var paths = pathStr.split(/\./);
      if (paths.length === 0) {
        return;
      }
      var path = paths.shift();
      if (paths.length === 0) {
        obj[path] = value;
        return;
      }
      if (!obj[path]) {
        obj[path] = {};
      }
      pathAssign(obj[path], paths.join('.'), value);
    };
    if (typeof apiPath === 'string') {
      apiPath = [apiPath];
    }
    var mockFB = {
      init: angular.noop
    };
    if (__toString.call(apiPath) === '[object Object]' && !value) {
      angular.forEach(apiPath, function (v, p) {
        pathAssign(mockFB, p, v);
      });
    } else {
      angular.forEach(apiPath, function (p) {
        pathAssign(mockFB, p, value);
      });
    }
    $provide.decorator('$window', function ($delegate) {
      $delegate.FB = mockFB;
      return $delegate;
    });
  });
}
function
javascript
84
@Slf4j
public class ContentJobWriteInterceptor extends ContentWriteInterceptorBase {

    protected static final String EXPORT_STORAGE_URL = "export-storage-url";
    protected static final String LOCAL_SETTINGS = "localSettings";
    protected static final String JOB_TYPE = "job-type";
    protected static final String XML_EXPORT = "xmlExport";

    @Value("${xml-export.storage-url}")
    private String xmlExportStorageUrl;

    @Override
    public void intercept(ContentWriteRequest request) {
        Map<String, Object> properties = request.getProperties();
        Object localSettings = properties.get(LOCAL_SETTINGS);
        if (localSettings != null) {
            Struct localSettingsStruct = (Struct) localSettings;
            String jobType = CapStructHelper.getString(localSettingsStruct, JOB_TYPE);
            if (XML_EXPORT.equals(jobType)) {
                StructBuilder structBuilder = getStructBuilder(localSettingsStruct,
                        request.getEntity().getRepository().getConnection().getStructService());
                setLocalSettingsExportUrl(xmlExportStorageUrl, localSettingsStruct, structBuilder);
                properties.put(LOCAL_SETTINGS, structBuilder.build());
            }
        }
    }

    private StructBuilder getStructBuilder(Struct localSettings, StructService structService) {
        StructBuilder structBuilder = structService.createStructBuilder();
        // https://documentation.coremedia.com/cmcc-10/artifacts/2104.1/javadoc/common/com/coremedia/cap/struct/StructBuilder.html
        for (CapPropertyDescriptor descriptor : localSettings.getType().getDescriptors()) {
            structBuilder.declare(descriptor, localSettings.get(descriptor.getName()));
        }
        return structBuilder;
    }

    private void setLocalSettingsExportUrl(String newSetting, Struct localSettings, StructBuilder structBuilder) {
        if ((localSettings.get(EXPORT_STORAGE_URL) == null) && (structBuilder.getDescriptor(EXPORT_STORAGE_URL) == null)) {
            structBuilder.declareString(EXPORT_STORAGE_URL, Integer.MAX_VALUE, newSetting);
        } else {
            structBuilder.set(EXPORT_STORAGE_URL, newSetting);
        }
    }
}
class
java
85
def step_full(self) -> None:
    c, c_test = 0, 0
    losses, losses_test = 0, 0
    if self.compute_accuracy:
        acc, acc_test = 0, 0
    for features, targets in self.train_loader:
        if self.augment_fn is not None:
            features, targets = self.augment_fn(
                features, targets, seed=c)
        loss = self.train_step(features, targets)
        losses += loss[0]
        if self.compute_accuracy:
            acc += loss[1]
        c += 1
    else:
        for features_, targets_ in self.test_loader:
            if self.augment_fn is not None:
                features_, targets_ = self.augment_fn(
                    features_, targets_, seed=c_test)
            loss_ = self.test_step(features_, targets_)
            losses_test += loss_[0]
            if self.compute_accuracy:
                acc_test += loss_[1]
            c_test += 1
    self.loss_acc["train_loss"].append(losses / c)
    self.loss_acc["test_loss"].append(losses_test / c_test)
    if self.compute_accuracy:
        self.loss_acc["train_accuracy"].append(acc / c)
        self.loss_acc["test_accuracy"].append(acc_test / c_test)
function
python
86
hciStatus_t HCI_EXT_AdvEventNoticeCmd(uint8 taskID, uint16 taskEvent)
{
    uint8_t taskId = ICall_getLocalMsgEntityId(ICALL_SERVICE_CLASS_BLE_MSG, taskID);

    return hciSendParamsCmd(HCI_EXT_ADV_EVENT_NOTICE, taskId, taskEvent, 0,
                            matchHciExtAdvEventNoticeCS);
}
function
c
87
void Draw::Camera::update (const float, const float elapsedTime, const bool simulationPaused)
{
    const AbstractVehicle& v = *vehicleToTrack;
    const bool noVehicle = vehicleToTrack == NULL;

    Vec3 newPosition = position();
    Vec3 newTarget = target;
    Vec3 newUp = up();

    const float antiLagTime = simulationPaused ? 0 : 1 / smoothMoveSpeed;
    const float predictionTime = aimLeadTime + antiLagTime;

    switch (mode)
    {
    case cmFixed:
        newPosition = fixedPosition;
        newTarget = fixedTarget;
        newUp = fixedUp;
        break;

    case cmFixedDistanceOffset:
        if (noVehicle) break;
        newUp = Vec3::up;
        newTarget = v.predictFuturePosition (predictionTime);
        newPosition = constDistHelper (elapsedTime);
        break;

    case cmStraightDown:
        if (noVehicle) break;
        newUp = v.forward();
        newTarget = v.predictFuturePosition (predictionTime);
        newPosition = newTarget;
        newPosition.y += lookdownDistance;
        break;

    case cmFixedLocalOffset:
        if (noVehicle) break;
        newUp = v.up();
        newTarget = v.predictFuturePosition (predictionTime);
        newPosition = v.globalizePosition (fixedLocalOffset);
        break;

    case cmOffsetPOV:
    {
        if (noVehicle) break;
        newUp = v.up();
        const Vec3 futurePosition = v.predictFuturePosition (antiLagTime);
        const Vec3 globalOffset = v.globalizeDirection (povOffset);
        newPosition = futurePosition + globalOffset;
        const float L = 10;
        newTarget = newPosition + (v.forward() * L);
        break;
    }

    default:
        break;
    }

    smoothCameraMove (newPosition, newTarget, newUp, elapsedTime);
    drawCameraLookAt (position(), target, up());
}
function
c++
88
private static void selection(Cell[] arr, int length, int k) {
    if (QUICKSELECT_BASED_PARTITION) {
        shuffle(arr, length);
        quickselect(arr, 0, length - 1, k - 1);
    } else {
        Arrays.sort(arr, 0, length, Comparator.comparing(c -> c.dist));
    }
}
function
java
89
int bluez_subscribe_signals(void) {

    g_dbus_connection_signal_subscribe(config.dbus, BLUEZ_SERVICE,
            DBUS_IFACE_OBJECT_MANAGER, "InterfacesAdded", NULL, NULL,
            G_DBUS_SIGNAL_FLAGS_NONE, bluez_signal_interfaces_added, NULL, NULL);

    g_dbus_connection_signal_subscribe(config.dbus, BLUEZ_SERVICE,
            DBUS_IFACE_OBJECT_MANAGER, "InterfacesRemoved", NULL, NULL,
            G_DBUS_SIGNAL_FLAGS_NONE, bluez_signal_interfaces_removed, NULL, NULL);

    g_dbus_connection_signal_subscribe(config.dbus, BLUEZ_SERVICE,
            DBUS_IFACE_PROPERTIES, "PropertiesChanged", NULL, BLUEZ_IFACE_MEDIA_TRANSPORT,
            G_DBUS_SIGNAL_FLAGS_NONE, bluez_signal_transport_changed, NULL, NULL);

    g_dbus_connection_signal_subscribe(config.dbus, DBUS_SERVICE,
            DBUS_IFACE_DBUS, "NameOwnerChanged", NULL, BLUEZ_SERVICE,
            G_DBUS_SIGNAL_FLAGS_NONE, bluez_signal_name_owner_changed, NULL, NULL);

    return 0;
}
function
c
90
int suser(kauth_cred_t cred, u_short *acflag)
{
#if DIAGNOSTIC
    if (!IS_VALID_CRED(cred))
        panic("suser");
#endif
    if (kauth_cred_getuid(cred) == 0) {
        if (acflag)
            *acflag |= ASU;
        return (0);
    }
    return (EPERM);
}
function
c
91
func FilterPublicAddress(ips []string) (string, bool) {
    for _, address := range ips {
        address = strings.TrimSpace(address)
        ipAddress := net.ParseIP(address)
        if ipAddress == nil {
            continue
        }
        var isPrivate bool
        for i := range cidrs {
            if cidrs[i].Contains(ipAddress) {
                isPrivate = true
            }
        }
        if isPrivate {
            continue
        }
        return address, true
    }
    return "", false
}
function
go
92
private Map<String, Integer> processLast(Tree last) {
    Map<String, Integer> result = new HashMap<>();
    if (last.getChildCount() == 1) {
        int numberOfLast = Integer.valueOf(last.getChild(0).getText());
        result.put("lastFrom", numberOfLast);
        result.put("howMany", numberOfLast);
    } else if (last.getChildCount() == 2) {
        int lastFrom = Integer.valueOf(last.getChild(0).getText());
        int howMany = Integer.valueOf(last.getChild(1).getText());
        result.put("lastFrom", lastFrom);
        result.put("howMany", howMany);
    } else {
        throw new IllegalArgumentException("Wrong syntax, LAST has to have exactly one or two arguments.");
    }
    /**
     * -----IMPORTANT------ because we assume that we have already entered the test execution that we want to process alert
     * with, we don't want to include it into the last X test executions, e.g. CONDITION result > x DEFINE x = (SELECT LAST
     * 1) would always fail, since the last test execution is the currently entered one. As a solution we do lastFrom - 1
     */
    result.put("lastFrom", result.get("lastFrom") + 1);
    return result;
}
function
java
93
public class MultipleRpcCommand extends BaseRpcInvokingCommand {

    public static final byte COMMAND_ID = 2;

    private static final Log log = LogFactory.getLog(MultipleRpcCommand.class);
    private static final boolean trace = log.isTraceEnabled();

    private ReplicableCommand[] commands;

    private MultipleRpcCommand() {
        super(null); // For command id uniqueness test
    }

    public MultipleRpcCommand(List<ReplicableCommand> modifications, ByteString cacheName) {
        super(cacheName);
        commands = modifications.toArray(new ReplicableCommand[modifications.size()]);
    }

    public MultipleRpcCommand(ByteString cacheName) {
        super(cacheName);
    }

    /**
     * Executes commands replicated to the current cache instance by other cache instances.
     */
    @Override
    public Object perform(InvocationContext ctx) throws Throwable {
        if (trace) log.tracef("Executing remotely originated commands: %d", commands.length);
        for (ReplicableCommand command : commands) {
            if (command instanceof TransactionBoundaryCommand) {
                command.perform(null);
            } else {
                processVisitableCommand(command);
            }
        }
        return null;
    }

    @Override
    public byte getCommandId() {
        return COMMAND_ID;
    }

    public ReplicableCommand[] getCommands() {
        return commands;
    }

    @Override
    public void writeTo(ObjectOutput output) throws IOException {
        MarshallUtil.marshallArray(commands, output);
    }

    @Override
    public void readFrom(ObjectInput input) throws IOException, ClassNotFoundException {
        commands = MarshallUtil.unmarshallArray(input, ReplicableCommand[]::new);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof MultipleRpcCommand)) return false;

        MultipleRpcCommand that = (MultipleRpcCommand) o;

        if (cacheName != null ? !cacheName.equals(that.cacheName) : that.cacheName != null) return false;
        if (!Arrays.equals(commands, that.commands)) return false;
        if (interceptorChain != null ? !interceptorChain.equals(that.interceptorChain) : that.interceptorChain != null)
            return false;

        return true;
    }

    @Override
    public int hashCode() {
        int result = interceptorChain != null ? interceptorChain.hashCode() : 0;
        result = 31 * result + (commands != null ? Arrays.hashCode(commands) : 0);
        result = 31 * result + (cacheName != null ? cacheName.hashCode() : 0);
        return result;
    }

    @Override
    public String toString() {
        return "MultipleRpcCommand{" +
                "commands=" + (commands == null ? null : Arrays.asList(commands)) +
                ", cacheName='" + cacheName + '\'' +
                '}';
    }

    @Override
    public boolean isReturnValueExpected() {
        return false;
    }

    @Override
    public boolean canBlock() {
        for (ReplicableCommand command : commands) {
            if (command.canBlock()) {
                return true;
            }
        }
        return false;
    }
}
class
java
94
def weather_fit(data):
    data = np.array(data)
    transition_counts = np.zeros((3, 3))
    emission_counts = np.zeros((3, 2))
    for index, datapoint in enumerate(data):
        if index != len(data)-1:
            transition_counts[data[index][0], data[index+1][0]] += 1
        emission_counts[data[index][0], data[index][1]] += 1
    transition_prob = transition_counts / np.sum(transition_counts, axis=0)
    emission_prob = (emission_counts.T / np.sum(emission_counts.T, axis=0)).T
    return transition_prob, emission_prob
function
python
95
public void read(EquipmentModelList emList) throws DatabaseException, SQLException {
    Logger.instance().log(Logger.E_DEBUG1, "Reading EquipmentModels...");
    _emList = emList;

    Statement stmt = createStatement();
    String q = "SELECT id, name, company, model, description, equipmentType "
        + "FROM EquipmentModel";
    ResultSet rs = stmt.executeQuery( q );
    if (rs != null)
        _makeEquipmentModels(rs, false);
    stmt.close();
}
function
java
96
void* Partitions::FastMalloc(size_t n, const char* type_name) {
#if !BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC)
  return FastMallocPartition()->Alloc(n, type_name);
#else
  return malloc(n);
#endif
}
function
c++
97
class EmailAddress extends SystemObject {

  static fromJson(text) {
    return new EmailAddress(text);
  }

  constructor(text) {
    super();
    Preconditions.requireText(text, "text");
    this.textValue = text;
  }

  text() {
    return this.textValue;
  }

  toJson() {
    return this.text();
  }

  typeName() {
    return TYPE_NAME;
  }

  equals(other) {
    return this === other ||
      (other instanceof EmailAddress && this.text() === other.text());
  }

  toString() {
    return this.text();
  }
}
class
javascript
98
public void killAlgorithm() {
    killThread.set(true);
    algorithmThread.interrupt();
    if (isPaused.get()) {
        threadKillCleanup();
    }
}
function
java
99