code (string, 0-30.8k chars) | source (6 classes) | language (9 classes) | __index_level_0__ (int64, 0-100k)
---|---|---|---|
def generate_from_dict(obj: Any):
if isinstance(obj, list):
return [generate_from_dict(value) for value in obj]
if "_type" not in obj:
return {key: generate_from_dict(value) for key, value in obj.items()}
class_type = globals()[obj.pop("_type")]
if class_type == Sequence:
return Sequence(feature=generate_from_dict(obj["feature"]), length=obj["length"])
return class_type(**obj) | function | python | 99,900 |
def palindrome(a):
if len(a) == 0:
return ("", "", 0)
cache = {}
def inner(i, j):
if (i, j) in cache: return cache[(i, j)]
if i == j:
value = a[i]
elif a[i] == a[j]:
if i + 1 == j:
value = (a[i] + a[j])
else:
value = a[i] + inner(i + 1, j - 1) + a[j]
else:
left = inner(i + 1, j)
right = inner(i, j - 1)
if len(left) < len(right):
value = a[i] + left + a[i]
else:
value = a[j] + right + a[j]
cache[(i, j)] = value
return value
result = inner(0, len(a) - 1)
return (a, result, len(result) - len(a)) | function | python | 99,901 |
@Override
protected void execute() {
if(Robot.m_oi.getTriggerLeft() < .3 && Robot.m_oi.getTriggerRight() < .3)
{
if(!(Robot.intakePan.getLimitLeft() || Robot.intakePan.getLimitRight())){
Robot.intakePan.set(Robot.m_oi.getJoy2TriggerRight() - Robot.m_oi.getJoy2TriggerLeft());
}
else if(Robot.intakePan.getLimitLeft()){
Robot.intakePan.set(Robot.m_oi.getJoy2TriggerRight());
}
else{
Robot.intakePan.set(-Robot.m_oi.getJoy2TriggerLeft());
}
}
else{
if(Robot.m_oi.getTriggerLeft() >= .3 && !Robot.intakePan.getLimitLeft() && !Robot.intakePan.getLineSensor()){
Robot.intakePan.set(-1);
}
else if(Robot.m_oi.getTriggerRight() >= .3 && !Robot.intakePan.getLimitRight() && !Robot.intakePan.getLineSensor()){
Robot.intakePan.set(1);
}
else{
Robot.intakePan.set(0);
}
}
} | function | java | 99,902 |
void CAnimObject::SetPropSheetPos(CPropertySheet *pSheet,CxAnimObjectList *pList)
{
if (!m_pPosPage)
m_pPosPage = new CAnimObjPosPage();
m_pPosPage->m_sCpntList.RemoveAll();
if (pList)
{
BOOL bFound = FALSE;
int nbC = pList->GetSize();
for (int i=0;i<nbC && !bFound;i++)
{
CAnimObject *pObj = pList->GetAt(i);
if (!pObj) continue;
if (pObj != this)
{
m_pPosPage->m_sCpntList.Add(pObj->GetObjectDef());
}
else
bFound = TRUE;
}
}
m_pPosPage->m_nComponent = m_nRelatedID;
m_pPosPage->m_bAttached = (BOOL)(m_nRelatedID != -1);
m_pPosPage->m_bShowTrace = m_bShowTrace;
m_pPosPage->m_bShowAxes = m_bShowAxes;
m_pPosPage->m_nDefXPos = m_nDDPos.m_ptDefPos.x;
m_pPosPage->m_nDefYPos = m_nDDPos.m_ptDefPos.y;
m_pPosPage->m_vHoriz = m_nDDPos.m_nParamX +1;
m_pPosPage->m_vVert = m_nDDPos.m_nParamY +1;
m_pPosPage->m_xScale = m_nDDPos.m_nFactX;
m_pPosPage->m_yScale = m_nDDPos.m_nFactY;
} | function | c++ | 99,903 |
func FanOutUntil[A any](done <-chan interface{}, buffer int, c <-chan A, size int) []<-chan A {
outs := make([]chan A, size)
for i := range outs {
outs[i] = make(chan A, buffer)
}
go func() {
defer func() {
for _, o := range outs {
close(o)
}
}()
for e := range c {
select {
case <-done:
return
case outs[0] <- e:
for _, o := range outs[1:] {
o <- e
}
}
}
}()
return Readers(outs...)
} | function | go | 99,904 |
def request_password_hash(hash_head: str) -> requests.Response:
url = "https://api.pwnedpasswords.com/range/" + hash_head
logger.debug("Requesting {}".format(url))
res = requests.get(url, headers=default_headers)
if res.status_code >= 400:
if res.status_code in [
400, 403, 404
]:
raise PwnedPasswordException({
"url": url,
"status_code": res.status_code,
"http_text": res.text
})
else:
logger.warning("Request failed, retrying...")
time.sleep(10)
return request_password_hash(hash_head)
return res | function | python | 99,905 |
static int
check_buffers (st_parameter_dt *dtp)
{
int c;
c = '\0';
if (dtp->u.p.current_unit->last_char != EOF - 1)
{
dtp->u.p.at_eol = 0;
c = dtp->u.p.current_unit->last_char;
dtp->u.p.current_unit->last_char = EOF - 1;
goto done;
}
if (dtp->u.p.line_buffer_enabled)
{
dtp->u.p.at_eol = 0;
c = dtp->u.p.line_buffer[dtp->u.p.line_buffer_pos];
if (c != '\0' && dtp->u.p.line_buffer_pos < 64)
{
dtp->u.p.line_buffer[dtp->u.p.line_buffer_pos] = '\0';
dtp->u.p.line_buffer_pos++;
goto done;
}
dtp->u.p.line_buffer_pos = 0;
dtp->u.p.line_buffer_enabled = 0;
}
done:
dtp->u.p.at_eol = (c == '\n' || c == '\r' || c == EOF);
return c;
} | function | c | 99,906 |
[JsonObject(MemberSerialization.OptOut)]
public class Training2Lesson
{
[Key]
[JsonIgnore]
public int Training2LessonId { get; set; }
public Guid LessonIdForExport { get; set; }
[Required]
public string Name { get; set; }
[AllowHtml]
public string Precondition { get; set; } = "";
[Required]
[AllowHtml]
public string Purpose{ get; set; }
[AllowHtml]
public string AcceptanceCriteria { get; set; } = "";
[LocalizedDisplayName("Valid for dual flights")]
public bool CanHaveDualFlightDuration { get; set; } = true;
[LocalizedDisplayName("Valid for solo flights")]
public bool CanHaveSoloFlightDuration { get; set; } = true;
public int DisplayOrder { get; set; }
[JsonIgnore]
public virtual ICollection<Training2Program> Programs { get; set; }
public virtual ICollection<Training2Exercise> Exercises { get; set; }
public Training2Lesson()
{
Programs = new HashSet<Training2Program>();
Exercises = new HashSet<Training2Exercise>();
}
public Training2Lesson(string name, string purpose)
{
Name = name;
Purpose = purpose;
Programs = new HashSet<Training2Program>();
Exercises = new HashSet<Training2Exercise>();
}
} | class | c# | 99,907 |
bool initialize_nondet_string_fields(
struct_exprt &struct_expr,
code_blockt &code,
const std::size_t &min_nondet_string_length,
const std::size_t &max_nondet_string_length,
const source_locationt &loc,
const irep_idt &function_id,
symbol_table_baset &symbol_table,
bool printable)
{
if(!java_string_library_preprocesst::implements_java_char_sequence(
struct_expr.type()))
{
return false;
}
namespacet ns(symbol_table);
const struct_typet &struct_type =
to_struct_type(ns.follow(struct_expr.type()));
if(!struct_type.has_component("length") || !struct_type.has_component("data"))
return false;
if(struct_type.get_tag() == "java.lang.CharSequence")
{
set_class_identifier(
struct_expr, ns, symbol_typet("java::java.lang.String"));
}
const symbolt length_sym = get_fresh_aux_symbol(
java_int_type(),
id2string(function_id),
"tmp_object_factory",
loc,
ID_java,
symbol_table);
const symbol_exprt length_expr = length_sym.symbol_expr();
const side_effect_expr_nondett nondet_length(length_expr.type(), loc);
code.add(code_declt(length_expr));
code.add(code_assignt(length_expr, nondet_length));
const exprt min_length =
from_integer(min_nondet_string_length, java_int_type());
code.add(code_assumet(binary_relation_exprt(length_expr, ID_ge, min_length)));
if(max_nondet_string_length <= max_value(length_expr.type()))
{
exprt max_length =
from_integer(max_nondet_string_length, length_expr.type());
code.add(
code_assumet(binary_relation_exprt(length_expr, ID_le, max_length)));
}
const typet data_ptr_type = pointer_type(
array_typet(java_char_type(), infinity_exprt(java_int_type())));
symbolt &data_pointer_sym = get_fresh_aux_symbol(
data_ptr_type, "", "string_data_pointer", loc, ID_java, symbol_table);
const auto data_pointer = data_pointer_sym.symbol_expr();
code.add(code_declt(data_pointer));
code.add(make_allocate_code(data_pointer, infinity_exprt(java_int_type())));
const dereference_exprt data_expr(data_pointer);
const exprt nondet_array =
make_nondet_infinite_char_array(symbol_table, loc, function_id, code);
code.add(code_assignt(data_expr, nondet_array));
struct_expr.operands()[struct_type.component_number("length")] = length_expr;
const address_of_exprt array_pointer(
index_exprt(data_expr, from_integer(0, java_int_type())));
add_pointer_to_array_association(
array_pointer, data_expr, symbol_table, loc, code);
add_array_to_length_association(
data_expr, length_expr, symbol_table, loc, code);
struct_expr.operands()[struct_type.component_number("data")] = array_pointer;
if(printable)
{
add_character_set_constraint(
array_pointer, length_expr, " -~", symbol_table, loc, code);
}
return true;
} | function | c++ | 99,908 |
public class SAMLBearerRequest extends TokenRequest {
// x-www-urlencoded keys / values sent to OAuth server for SAML grant flow
static final String CLIENT_ASSERTION_TYPE_KEY = "client_assertion_type";
static final String CLIENT_ASSERTION_TYPE_VALUE = "urn:ietf:params:oauth:client-assertion-type:jwt-bearer";
static final String CLIENT_ASSERTION_KEY = "client_assertion"; // value is JWT
static final String ASSERTION_KEY = "assertion"; // value is SAML token
static final String GRANT_TYPE_KEY = "grant_type";
static final String GRANT_TYPE_VALUE = "urn:ietf:params:oauth:grant-type:saml2-bearer";
private String samlAssertion;
private String jwtClientAssertion;
private static final Logger logger = LoggerFactory.getLogger(SAMLBearerRequest.class);
public SAMLBearerRequest(String samlAssertion, String jwtClientAssertion) {
setGrantType(ClientConfig.SAML_BEARER);
this.samlAssertion = samlAssertion;
this.jwtClientAssertion = jwtClientAssertion;
try {
Map<String, Object> tokenConfig = ClientConfig.get().getTokenConfig();
setServerUrl((String)tokenConfig.get(ClientConfig.SERVER_URL));
setProxyHost((String)tokenConfig.get(ClientConfig.PROXY_HOST));
int port = tokenConfig.get(ClientConfig.PROXY_PORT) == null ? 443 : (Integer)tokenConfig.get(ClientConfig.PROXY_PORT);
setProxyPort(port);
Object object = tokenConfig.get(ClientConfig.ENABLE_HTTP2);
setEnableHttp2(object != null && (Boolean) object);
Map<String, Object> ccConfig = (Map<String, Object>) tokenConfig.get(ClientConfig.CLIENT_CREDENTIALS);
setClientId((String) ccConfig.get(ClientConfig.CLIENT_ID));
setUri((String) ccConfig.get(ClientConfig.URI));
} catch (NullPointerException e) {
logger.error("Nullpointer in config object: " + e);
}
}
public String getSamlAssertion() {
return this.samlAssertion;
}
public String getJwtClientAssertion() {
return this.jwtClientAssertion;
}
} | class | java | 99,909 |
static int
disk_write_block_aligned_base(int fd, int is_base_disk, const void* data,
size_t bytecount, disk_blockptr_t* block,
struct disk_info *info, int align, off_t *offset)
{
blocknum_t current_block;
blocknum_t blocknum;
off_t current_pos;
if (align == 0)
align = info->phy_block_size;
current_pos = lseek(fd, 0, SEEK_CUR);
if (current_pos == -1) {
error_text(strerror(errno));
return -1;
}
if (current_pos % align != 0) {
current_pos = lseek(fd, align - current_pos % align, SEEK_CUR);
if (current_pos == -1) {
error_text(strerror(errno));
return -1;
}
}
current_block = current_pos / info->phy_block_size;
if (bytecount > (size_t)info->phy_block_size)
bytecount = info->phy_block_size;
if (misc_write(fd, data, bytecount))
return -1;
if (block != NULL) {
if (disk_get_blocknum(fd, is_base_disk, current_block,
&blocknum, info))
return -1;
disk_blockptr_from_blocknum(block, blocknum, info);
}
if (offset)
*offset = current_pos;
return 0;
} | function | c | 99,910 |
def _global_grid_recorder(
global_processing_map, quad_database_path, database_command_queue,
global_report_queue):
try:
while True:
payload = global_report_queue.get()
LOGGER.debug(f'_global_grid_recorder payload {payload}')
if payload == 'STOP':
break
if isinstance(payload, tuple):
(grid_id, long_min, lat_min, long_max, lat_max,
quad_count) = payload
global_processing_map[grid_id] = [
quad_count, (long_min, lat_min, long_max, lat_max)]
else:
grid_id = payload
global_processing_map[grid_id][0] -= 1
if global_processing_map[grid_id][0] < 0:
raise RuntimeError(
f'too many grid ids reported for {grid_id}')
if global_processing_map[grid_id][0] == 0:
long_min, lat_min, long_max, lat_max = \
global_processing_map[grid_id][1]
LOGGER.debug(f'done with grid {grid_id}')
_execute_sqlite(
'''
INSERT OR REPLACE INTO processed_grid_table
(grid_id, long_min, lat_min, long_max, lat_max,
status)
VALUES (?, ?, ?, ?, ?, ?);
''', quad_database_path,
database_command_queue=database_command_queue,
execute='execute', argument_list=[
grid_id, long_min, lat_min, long_max, lat_max,
"complete"])
except Exception:
LOGGER.exception('something bad happened in _global_grid_recorder') | function | python | 99,911 |
static inline nat_mt
nat_normalize(struct natrep *n)
{
while (n->nlimbs && !n->limbs[n->nlimbs-1])
n->nlimbs--;
return n;
} | function | c | 99,912 |
public abstract class OperationVisitor<TResult>
{
public virtual TResult Visit(Operation operation)
{
if (operation != null)
{
return operation.Accept(this);
}
return default(TResult);
}
public virtual TResult DefaultVisit(Operation operation)
{
return default(TResult);
}
public virtual TResult VisitAssignment(Assignment assignment)
{
return this.DefaultVisit(assignment);
}
public virtual TResult VisitFieldRead(FieldRead fieldRead)
{
return this.DefaultVisit(fieldRead);
}
public virtual TResult VisitFieldWrite(FieldWrite fieldWrite)
{
return this.DefaultVisit(fieldWrite);
}
} | class | c# | 99,913 |
public static void CastDisc<T>(this Navmesh navmesh, T discCast, NativeList<IntPtr> open, NativeHashSet<int> closed) where T : IDiscCast
{
var o = discCast.Origin;
var r = discCast.Radius;
var tri = navmesh.FindTriangleContainingPoint(o);
open.Clear();
closed.Clear();
Check(tri);
Check(tri->LNext);
Check(tri->LPrev);
while (open.Length > 0)
{
tri = (Edge*) open[open.Length - 1];
open.Resize(open.Length - 1, NativeArrayOptions.UninitializedMemory);
Check(tri->LNext);
Check(tri->LPrev);
}
void Check(Edge* edge)
{
if (closed.Contains(edge->QuadEdgeId))
return;
if (IntersectSegDisc(edge->Org->Point, edge->Dest->Point, o, r))
{
open.Add((IntPtr) edge->Sym);
if (edge->Constrained)
discCast.RegisterCollision(edge);
}
closed.Add(edge->QuadEdgeId);
}
} | function | c# | 99,914 |
private static class ExtendedKeyUsagePKIXCertPathChecker extends PKIXCertPathChecker {
private static final String EKU_OID = "2.5.29.37";
private static final String EKU_anyExtendedKeyUsage = "2.5.29.37.0";
private static final String EKU_clientAuth = "1.3.6.1.5.5.7.3.2";
private static final String EKU_serverAuth = "1.3.6.1.5.5.7.3.1";
private static final String EKU_nsSGC = "2.16.840.1.113730.4.1";
private static final String EKU_msSGC = "1.3.6.1.4.1.311.10.3.3";
private static final Set<String> SUPPORTED_EXTENSIONS
= Collections.unmodifiableSet(new HashSet<String>(Arrays.asList(EKU_OID)));
private final boolean clientAuth;
private final X509Certificate leaf;
private ExtendedKeyUsagePKIXCertPathChecker(boolean clientAuth, X509Certificate leaf) {
this.clientAuth = clientAuth;
this.leaf = leaf;
}
@Override
public void init(boolean forward) throws CertPathValidatorException {
}
@Override
public boolean isForwardCheckingSupported() {
return true;
}
@Override
public Set<String> getSupportedExtensions() {
return SUPPORTED_EXTENSIONS;
}
@SuppressWarnings("ReferenceEquality")
@Override
public void check(Certificate c, Collection<String> unresolvedCritExts)
throws CertPathValidatorException {
// We only want to validate the EKU on the leaf certificate.
if (c != leaf) {
return;
}
List<String> ekuOids;
try {
ekuOids = leaf.getExtendedKeyUsage();
} catch (CertificateParsingException e) {
// A malformed EKU is bad news, consider it fatal.
throw new CertPathValidatorException(e);
}
// We are here to check EKU, but there is none.
if (ekuOids == null) {
return;
}
boolean goodExtendedKeyUsage = false;
for (String ekuOid : ekuOids) {
// anyExtendedKeyUsage for clients and servers
if (ekuOid.equals(EKU_anyExtendedKeyUsage)) {
goodExtendedKeyUsage = true;
break;
}
// clients
if (clientAuth) {
if (ekuOid.equals(EKU_clientAuth)) {
goodExtendedKeyUsage = true;
break;
}
continue;
}
// servers
if (ekuOid.equals(EKU_serverAuth)) {
goodExtendedKeyUsage = true;
break;
}
if (ekuOid.equals(EKU_nsSGC)) {
goodExtendedKeyUsage = true;
break;
}
if (ekuOid.equals(EKU_msSGC)) {
goodExtendedKeyUsage = true;
break;
}
}
if (goodExtendedKeyUsage) {
// Mark extendedKeyUsage as resolved if present.
unresolvedCritExts.remove(EKU_OID);
} else {
throw new CertPathValidatorException("End-entity certificate does not have a valid "
+ "extendedKeyUsage.");
}
}
} | class | java | 99,915 |
class CutConcatenate:
"""
A transform on batch of cuts (``CutSet``) that concatenates the cuts to minimize the total amount of padding;
e.g. instead of creating a batch with 40 examples, we will merge some of the examples together
adding some silence between them to avoid a large number of padding frames that waste the computation.
"""
def __init__(self, gap: Seconds = 1.0, duration_factor: float = 1.0) -> None:
"""
CutConcatenate's constructor.
:param gap: The duration of silence in seconds that is inserted between the cuts;
its goal is to let the model "know" that there are separate utterances in a single example.
:param duration_factor: Determines the maximum duration of the concatenated cuts;
by default it's 1, setting the limit at the duration of the longest cut in the batch.
"""
self.gap = gap
self.duration_factor = duration_factor
def __call__(self, cuts: CutSet) -> CutSet:
cuts = cuts.sort_by_duration(ascending=False)
return concat_cuts(
cuts, gap=self.gap, max_duration=cuts[0].duration * self.duration_factor
) | class | python | 99,916 |
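A minimal usage sketch for the CutConcatenate transform above (assumption: cuts is a lhotse-style CutSet built elsewhere, e.g. the batch produced by a sampler; only the __call__ path shown above is exercised):
transform = CutConcatenate(gap=0.5, duration_factor=1.0)
batch_cuts = transform(cuts)  # cuts merged with 0.5 s of silence between them, total duration capped at the longest cut's duration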
[SetUpFixture]
public class TestRunSetup
{
public static IHost TestWebHost { get; private set; }
public static SeleniumWebDriverService WebDriverService { get; private set; }
[OneTimeSetUp]
public async Task RunBeforeAnyTestsAsync()
{
LoggingHelper.ClearLogs();
LoggingHelper.ClearScreenshots();
LoggingHelper.ClearAccessibilityResults();
TestWebHost = HostHelper.CreateTestWebHost<TestDependencyModule>(applicationName: "ModernSlavery.Hosts.Web");
await TestWebHost.StartAsync().ConfigureAwait(false);
TestWebHost.OpenSQLFirewall();
await TestWebHost.ResetDatabaseAsync().ConfigureAwait(false);
await TestWebHost.ResetSearchIndexesAsync().ConfigureAwait(false);
await TestWebHost.Services.DeleteDraftsAsync().ConfigureAwait(false);
await TestWebHost.Services.ClearQueuesAsync().ConfigureAwait(false);
var baseUrl = TestWebHost.GetHostAddress();
TestContext.Progress.WriteLine($"Test Host started on endpoint: {baseUrl}");
WebDriverService = UITest.SetupWebDriverService(baseUrl: baseUrl);
}
[OneTimeTearDown]
public async Task RunAfterAnyTestsAsync()
{
if (TestWebHost == null) return;
AxeHelper.SaveResultSummary();
LoggingHelper.AttachLogs();
LoggingHelper.AttachScreenshots();
LoggingHelper.AttachAccessibilityResults();
AzureHelpers.CloseSQLFirewall();
await (TestWebHost?.StopAsync()).ConfigureAwait(false);
TestWebHost?.Dispose();
WebDriverService?.DisposeService();
}
} | class | c# | 99,917 |
def parse(file_name, user_id):
with open(file_name, 'rb') as data_file:
data = data_file.read()
rows = re.split(b'spotify:[pse]', data)
folder = {'type': 'folder', 'children': []}
stack = []
for row in rows:
chunks = row.split(b'\r', 1)
row = chunks[0]
if row.startswith(b'laylist:'):
folder['children'].append({
'type': 'playlist',
'uri': 'spotify:p' + row[:-1].decode('utf-8')
})
elif row.startswith(b'tart-group:'):
stack.append(folder)
tags = row.split(b':')
folder = dict(
name=unquote_plus(tags[-1][:-1].decode('utf-8')),
type='folder',
uri=(
'spotify:user:%s:folder:' % user_id
+ tags[-2].decode('utf-8')
),
children=[]
)
elif row.startswith(b'nd-group:'):
parent = stack.pop()
parent['children'].append(folder)
folder = parent
if folder.get('children') and len(chunks) > 1:
break
while len(stack) > 0:
parent = stack.pop()
parent['children'].append(folder)
folder = parent
return folder | function | python | 99,918 |
def generate_x(c, a, b, num_x, random_seed):
raise NotImplementedError("No functioning implementation finished yet.")
np.random.seed(random_seed)
# get the optimal solution
optimal_x = solve_lp(c, a, b)
# use twice the optimal solution (of course invalid) as upper bound for randomly generated instances
bounds = 0.02 * np.max(optimal_x)
# generate instances with values between 0 and the respective bounds value
all_x = np.zeros((num_x, a.shape[1]))
all_y = np.zeros((num_x, 1))
for i in range(num_x):
all_x[i] = np.random.rand(len(c)) * bounds
all_y[i] = int(np.all(np.matmul(a, all_x[i]) <= b))
return all_x, all_y | function | python | 99,919 |
func (q *Query) Exec(ctx context.Context) (*QueryResult, error) {
var r QueryResult
if q.client == nil || !q.client.Started() {
return &r, fmt.Errorf("client or db is nil")
}
switch q.action {
case "select":
rows, err := q.execSelect(ctx)
r.Rows = rows
return &r, err
case "insert":
rows, err := q.execInsert(ctx)
r.Rows = rows
return &r, err
case "update":
var err error
if len(q.returning) == 0 {
r.RowsAffected, err = q.execUpdate(ctx)
} else {
r.Rows, err = q.execUpdateR(ctx)
}
return &r, err
case "delete":
var err error
if len(q.returning) == 0 {
r.RowsAffected, err = q.execDelete(ctx)
} else {
r.Rows, err = q.execDeleteR(ctx)
}
return &r, err
default:
return &r, fmt.Errorf("unsupported action %v", q.action)
}
} | function | go | 99,920 |
protected override void SolveInstance(IGH_DataAccess DA)
{
Drawing drawing = new Drawing();
if (!DA.GetData<Drawing>(0, ref drawing)) return;
Sm.DrawingVisual dwg = drawing.ToGeometryVisual();
int dpi = 96;
DA.GetData(1, ref dpi);
if (dpi < 96) dpi = 96;
double width = drawing.Width;
double height = drawing.Height;
BitmapEncoder encoding = new PngBitmapEncoder();
DA.SetData(0,dwg.ToBitmap(width, height,dpi, encoding));
} | function | c# | 99,921 |
public class SimpleDemoScenario extends Scenario {
public static final String TYPE = "SimpleDemo";
private static ScenarioNode rootNode;
private static void init() throws Exception {
if (rootNode != null) {
return;
}
ScenarioNode START = rootNode = new NodeBuilder().addSendAction(new StdoutPrintAction("*** START ***")).build();
ScenarioNode AA = new NodeBuilder().addSendAction(new StdoutPrintAction("*** AA ***")).build();
ScenarioNode AB = new NodeBuilder().addSendAction(new StdoutPrintAction("*** AB ***")).build();
ScenarioNode AC = new NodeBuilder().addSendAction(new StdoutPrintAction("*** AC ***")).build();
ScenarioNode AD = new NodeBuilder().addSendAction(new StdoutPrintAction("*** AD ***")).build();
ScenarioNode END = new NodeBuilder().addSendAction(new StdoutPrintAction("*** END ***")).build();
/*
*
* START
* \
* AA - AB
* | \ |
* | \|
* AD - AC
* /
* END
*
*/
START.addChild(AA);
AA.addChild(80, AB).addChild(20, AC).validateProbabilities();
AB.addChild(AC).validateProbabilities();
AC.addChild(50, AD).addChild(50, AA).validateProbabilities();
AD.addChild(80, AA).addChild(20, END).validateProbabilities();
}
@Override
protected HashMap<String, Object> createNewScenarioState() {
return new HashMap<>();
}
@Override
protected ScenarioNode getRootNode() throws Exception {
init();
return rootNode;
}
@Override
public String getType() {
return TYPE;
}
@Override
public int getDelaysVariability() {
return 0;
}
} | class | java | 99,922 |
private Component CheckAndGetUniqueResultTypeFromRoot(GameObject targetedRootGameObject, FieldInfo componentField, IEnumerable<Component> componentsFound)
{
int nbFound = componentsFound.Count();
if (nbFound > 1)
{
throw new NixiAttributeException($"Multiple components were found with type {componentField.FieldType.Name} on root " +
$"GameObject with name {targetedRootGameObject.name} to fill field with name {componentField.Name}, " +
$"could not define which one should be used ({nbFound} found instead of just one)");
}
return componentsFound.Single();
} | function | c# | 99,923 |
public static String encode(String username, List<String> warmupChallenges, String officialChallenge) {
List<String> allChallenges = new ArrayList<>(warmupChallenges);
allChallenges.add(officialChallenge);
String challengeCSV = allChallenges.stream().collect(Collectors.joining(","));
String unobfuscatedId = username + "|" + challengeCSV + "|" + "Q";
return Base64.getEncoder().encodeToString(unobfuscatedId.getBytes());
} | function | java | 99,924 |
public synchronized void build(Resource packagerResource, Map<String, String> properties) throws IOException {
if (this.built) {
throw new IllegalStateException("build in directory `" + this.dir
+ "' already completed");
}
if (this.dir.exists()) {
if (!cleanup()) {
throw new IOException("can't remove directory `" + this.dir + "'");
}
}
if (!this.dir.mkdirs()) {
throw new IOException("can't create directory `" + this.dir + "'");
}
InputStream packagerXML = packagerResource.openStream();
saveFile("packager.xml", packagerXML);
saveFile("packager.xsl");
saveFile("packager-1.0.xsd");
saveFile("build.xml");
Project project = new Project();
project.init();
project.setUserProperty("ant.file", new File(dir, "build.xml").getAbsolutePath());
ProjectHelper.configureProject(project, new File(dir, "build.xml"));
project.setBaseDir(dir);
BuildLogger logger = new DefaultLogger();
logger.setMessageOutputLevel(this.verbose ? Project.MSG_VERBOSE
: this.quiet ? Project.MSG_WARN : Project.MSG_INFO);
logger.setOutputPrintStream(System.out);
logger.setErrorPrintStream(System.err);
project.addBuildListener(logger);
project.setUserProperty("ivy.packager.organisation", ""
+ this.mr.getModuleId().getOrganisation());
project.setUserProperty("ivy.packager.module", "" + this.mr.getModuleId().getName());
project.setUserProperty("ivy.packager.revision", "" + this.mr.getRevision());
project.setUserProperty("ivy.packager.branch", "" + this.mr.getBranch());
if (this.resourceCache != null) {
project.setUserProperty("ivy.packager.resourceCache",
"" + this.resourceCache.getCanonicalPath());
}
if (this.resourceURL != null) {
project.setUserProperty("ivy.packager.resourceURL", "" + getResourceURL());
}
if (this.validate) {
project.setUserProperty("ivy.packager.validate", "true");
}
project.setUserProperty("ivy.packager.restricted", "" + this.restricted);
project.setUserProperty("ivy.packager.quiet", String.valueOf(quiet));
if (properties != null) {
for (Map.Entry<String, String> entry : properties.entrySet()) {
project.setUserProperty(entry.getKey(), entry.getValue());
}
}
Message.verbose("performing packager resolver build in " + this.dir);
try {
project.executeTarget("build");
this.built = true;
} catch (BuildException e) {
Message.verbose("packager resolver build failed: " + e);
throw e;
}
} | function | java | 99,925 |
public final void assemble(@Nonnull AssemblyBuilder builder) throws IOException {
if (builder == null) {
throw new NullPointerException("builder");
}
final SourceNode sourceNode = builder.getStep().getLocation().getSourceLocation().getSourceNode();
if (this != sourceNode) {
throw new IllegalArgumentException(
"builder's current assembly step's source node must be the source node on which SourceNode.assemble() is called");
}
final ParseError parseError = this.parseError;
if (parseError != null) {
builder.addMessage(new ParseErrorMessage(parseError));
return;
}
this.assembleCore(builder);
} | function | java | 99,926 |
@NonNull
public PendingRecording withEventListener(@NonNull Executor callbackExecutor,
@NonNull Consumer<VideoRecordEvent> listener) {
Preconditions.checkNotNull(callbackExecutor, "CallbackExecutor can't be null.");
Preconditions.checkNotNull(listener, "Event listener can't be null");
setCallbackExecutor(callbackExecutor);
setEventListener(listener);
return this;
} | function | java | 99,927 |
pub fn add_unique<T: Send + Sync + Component>(
&self,
component: T,
) -> Result<(), error::Borrow> {
self.all_storages.borrow()?.add_unique(component);
Ok(())
} | function | rust | 99,928 |
class DelayedRewards:
"""
Environment intended to incentivize the agent to delay acknowledging
every other reward. For example, if the first three rewards are
1, 3, 7, then the agent is incentivized to act as if the first three
rewards are 1, 0, 10 (the reward of 3 being delayed and added onto the
subsequent reward of 7). If the first five rewards are 1, 1, 1, 1, 1,
then the agent is incentivized to act as if the first five rewards
are actually 1, 0, 2, 0, 2. And so on. Whenever the agent acts, the
environment determines whether the agent would have taken the same
action if the rewards had been so delayed. If so, then the agent gets
reward +1, otherwise the agent gets reward -1.
"""
n_actions, n_obs = 2, 1
def __init__(self, A):
self.sim = A()
self.stepcnt = 0
self.prev_reward = 0
def start(self):
obs = 0
return obs
def step(self, action):
hypothetical_action = self.sim.act(obs=0)
reward = 1 if (action == hypothetical_action) else -1
obs = 0
if self.stepcnt % 2 == 0:
sim_reward = reward + self.prev_reward
else:
sim_reward = 0
self.prev_reward = reward
self.stepcnt += 1
self.sim.train(o_prev=0, a=action, r=sim_reward, o_next=0)
return (reward, obs) | class | python | 99,929 |
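A minimal driver sketch for the DelayedRewards environment above, assuming only the agent interface the snippet itself implies (a class exposing act(obs) and train(o_prev, a, r, o_next)); the ConstantAgent here is a hypothetical stub:
class ConstantAgent:
    def act(self, obs):
        return 0  # hypothetical stub: always pick action 0
    def train(self, o_prev, a, r, o_next):
        pass      # no learning in this stub

env = DelayedRewards(ConstantAgent)
obs = env.start()
for _ in range(4):
    reward, obs = env.step(action=0)  # matches the simulated agent's choice, so reward is +1 each step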
class RelayInputStream extends InputStream
{
private InputStream mInputStream = null;
private OutputStream mOutputStream = null;
private long total = 0L;
RelayInputStream(InputStream is, OutputStream os)
{
mInputStream = is;
mOutputStream = os;
}
@Override
public int available() throws IOException
{
checkStreams();
Log.i(Constants.HTTP_TAG,"available available = " + mInputStream.available());
mInputStream.mark(mInputStream.available());
return mInputStream.available();
}
private void checkStreams() throws IOException {
if(mInputStream == null){
throw new IOException("input stream cannot be null.");
}
if(mOutputStream == null){
throw new IOException("ouput stream cannot be null.");
}
}
@Override
public int read(byte[] buffer) throws IOException
{
Log.i(Constants.HTTP_TAG,"read buffer;");
return read(buffer, 0, buffer.length);
}
@Override
public int read(byte[] buffer, int offset, int length) throws IOException
{
checkStreams();
int read = mInputStream.read(buffer, offset, length);
total += read;
Log.i(Constants.HTTP_TAG,"read buffer offset = " + offset + "; length = " + length+" total: "+(total / 1024L));
mOutputStream.write(buffer, offset, read);
mOutputStream.flush();
return read;
}
@Override
public int read() throws IOException
{
checkStreams();
Log.i(Constants.HTTP_TAG,"read no data");
int b = mInputStream.read();
mOutputStream.write(b);
return b;
}
@Override
public void close() throws IOException {
checkStreams();
Log.i(Constants.HTTP_TAG,"http proxy conn closed");
mOutputStream.flush();
mOutputStream.close();
mInputStream.close();
}
} | class | java | 99,930 |
int
ms_rlog_free (MSLogParam *logp)
{
MSLogEntry *logentry = NULL;
int freed = 0;
if (!logp)
logp = &gMSLogParam;
logentry = logp->registry.messages;
while (logentry)
{
freed++;
logp->registry.messages = logentry->next;
free (logentry);
logentry = logp->registry.messages;
}
return freed;
} | function | c | 99,931 |
def _remove_empty_dicts(data: MutableMapping, factory=dict) -> MutableMapping:
d = factory()
for key, value in data.items():
if value is None:
pass
elif isinstance(value, Dict):
sub_dict = _remove_empty_dicts(value, factory=factory)
if sub_dict:
d[key] = sub_dict
else:
d[key] = value
return d | function | python | 99,932 |
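A worked example of the helper above: None values are dropped, nested mappings are cleaned recursively, and mappings that end up empty are pruned.
data = {"a": None, "b": {"c": None}, "d": {"e": 1}, "f": 0}
cleaned = _remove_empty_dicts(data)
# cleaned == {"d": {"e": 1}, "f": 0}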
static void __bnx2x_vlan_mac_h_read_unlock(struct bnx2x *bp,
struct bnx2x_vlan_mac_obj *o)
{
if (!o->head_reader) {
BNX2X_ERR("Need to release vlan mac reader lock, but lock isn't taken\n");
#ifdef BNX2X_STOP_ON_ERROR
bnx2x_panic();
#endif
} else {
o->head_reader--;
DP(BNX2X_MSG_SP, "vlan_mac_lock - decreased readers to %d\n",
o->head_reader);
}
if (!o->head_reader && o->head_exe_request) {
DP(BNX2X_MSG_SP, "vlan_mac_lock - reader release encountered a pending request\n");
__bnx2x_vlan_mac_h_write_unlock(bp, o);
}
} | function | c | 99,933 |
ulint first_page_t::write(trx_id_t trxid, const byte *&data, ulint &len) {
byte *ptr = data_begin();
ulint written = (len > max_space_available()) ? max_space_available() : len;
mlog_write_string(ptr, data, written, m_mtr);
set_data_len(written);
set_trx_id(trxid);
data += written;
len -= written;
return (written);
} | function | c++ | 99,934 |
static void
mark_dies (dw_die_ref die)
{
dw_die_ref c;
gcc_assert (!die->die_mark);
die->die_mark = 1;
FOR_EACH_CHILD (die, c, mark_dies (c));
} | function | c | 99,935 |
private void InitializeComponent()
{
this.valSize = new Desktop.Skinning.SkinnedNumericUpDown();
this.radPt = new Desktop.Skinning.SkinnedRadioButton();
this.radPx = new Desktop.Skinning.SkinnedRadioButton();
((System.ComponentModel.ISupportInitialize)(this.valSize)).BeginInit();
this.SuspendLayout();
// valSize
this.valSize.BackColor = System.Drawing.Color.White;
this.valSize.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F);
this.valSize.ForeColor = System.Drawing.Color.Black;
this.valSize.Location = new System.Drawing.Point(3, 1);
this.valSize.Minimum = new decimal(new int[] {
1,
0,
0,
0});
this.valSize.Name = "valSize";
this.valSize.Size = new System.Drawing.Size(48, 20);
this.valSize.TabIndex = 0;
this.valSize.Value = new decimal(new int[] {
1,
0,
0,
0});
// radPt
this.radPt.AutoSize = true;
this.radPt.FieldType = Desktop.Skinning.SkinnedFieldType.Primary;
this.radPt.Location = new System.Drawing.Point(54, 2);
this.radPt.Name = "radPt";
this.radPt.Size = new System.Drawing.Size(34, 17);
this.radPt.TabIndex = 1;
this.radPt.TabStop = true;
this.radPt.Text = "pt";
this.radPt.UseVisualStyleBackColor = true;
// radPx
this.radPx.AutoSize = true;
this.radPx.FieldType = Desktop.Skinning.SkinnedFieldType.Primary;
this.radPx.Location = new System.Drawing.Point(91, 2);
this.radPx.Name = "radPx";
this.radPx.Size = new System.Drawing.Size(36, 17);
this.radPx.TabIndex = 2;
this.radPx.TabStop = true;
this.radPx.Text = "px";
this.radPx.UseVisualStyleBackColor = true;
// StyleFontSizeControl
this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.Controls.Add(this.radPx);
this.Controls.Add(this.radPt);
this.Controls.Add(this.valSize);
this.Name = "StyleFontSizeControl";
this.Size = new System.Drawing.Size(150, 21);
((System.ComponentModel.ISupportInitialize)(this.valSize)).EndInit();
this.ResumeLayout(false);
this.PerformLayout();
} | function | c# | 99,936 |
def check_majorana_and_flip_flow(self, found_majorana,
wavefunctions,
diagram_wavefunctions,
external_wavefunctions,
wf_number, force_flip_flow=False,
number_to_wavefunctions=[]):
if not found_majorana:
found_majorana = self.get('self_antipart')
new_wf = self
flip_flow = False
flip_sign = False
mothers = copy.copy(self.get('mothers'))
if not mothers:
if force_flip_flow:
flip_flow = True
elif not self.get('self_antipart'):
flip_flow = found_majorana
else:
flip_sign = found_majorana
else:
fermion_mother = self.find_mother_fermion()
if fermion_mother.get_with_flow('state') != \
self.get_with_flow('state'):
new_mother = fermion_mother
else:
new_mother, wf_number = fermion_mother.\
check_majorana_and_flip_flow(\
found_majorana,
wavefunctions,
diagram_wavefunctions,
external_wavefunctions,
wf_number,
force_flip_flow)
flip_sign = new_mother.get_with_flow('state') != \
self.get_with_flow('state') and \
self.get('self_antipart')
flip_flow = new_mother.get_with_flow('state') != \
self.get_with_flow('state') and \
not self.get('self_antipart')
mothers[mothers.index(fermion_mother)] = new_mother
if flip_flow or flip_sign:
if self in wavefunctions:
new_wf = copy.copy(self)
wf_number = wf_number + 1
new_wf.set('number', wf_number)
try:
old_wf_index = diagram_wavefunctions.index(self)
old_wf = diagram_wavefunctions[old_wf_index]
if self.get('number') == old_wf.get('number'):
wf_number -= 1
new_wf.set('number', old_wf.get('number'))
diagram_wavefunctions[old_wf_index] = new_wf
except ValueError:
if len(self['mothers']) == 0:
if diagram_wavefunctions:
wf_nb = diagram_wavefunctions[0].get('number')
for w in diagram_wavefunctions:
w.set('number', w.get('number') + 1)
new_wf.set('number', wf_nb)
diagram_wavefunctions.insert(0, new_wf)
else:
diagram_wavefunctions.insert(0, new_wf)
else:
for i, wf in enumerate(diagram_wavefunctions):
if self in wf.get('mothers'):
new_wf.set('number', wf.get('number'))
for w in diagram_wavefunctions[i:]:
w.set('number', w.get('number') + 1)
diagram_wavefunctions.insert(i, new_wf)
break
else:
max_mother_index = max([-1]+
[diagram_wavefunctions.index(wf) for wf in
mothers if wf in diagram_wavefunctions])
if max_mother_index<len(diagram_wavefunctions)-1:
new_wf.set('number',diagram_wavefunctions[
max_mother_index+1].get('number'))
for wf in diagram_wavefunctions[max_mother_index+1:]:
wf.set('number',wf.get('number')+1)
diagram_wavefunctions.insert(max_mother_index+1,
new_wf)
new_wf.set('mothers', mothers)
if flip_flow:
new_wf.set('fermionflow', -new_wf.get('fermionflow'))
if flip_sign:
new_wf.set('state', filter(lambda state: \
state != new_wf.get('state'),
['incoming', 'outgoing'])[0])
new_wf.set('is_part', not new_wf.get('is_part'))
try:
new_wf_number = new_wf.get('number')
new_wf = wavefunctions[wavefunctions.index(new_wf)]
diagram_wf_numbers = [w.get('number') for w in \
diagram_wavefunctions]
index = diagram_wf_numbers.index(new_wf_number)
diagram_wavefunctions.pop(index)
for wf in diagram_wavefunctions:
if wf.get('number') > new_wf_number:
wf.set('number', wf.get('number') - 1)
wf_number = wf_number - 1
for n_to_wf_dict in number_to_wavefunctions:
if new_wf in n_to_wf_dict.values():
for key in n_to_wf_dict.keys():
if n_to_wf_dict[key] == new_wf:
n_to_wf_dict[key] = new_wf
if self.get('is_loop'):
for wf in diagram_wavefunctions:
for i,mother_wf in enumerate(wf.get('mothers')):
if mother_wf.get('number')==new_wf_number:
wf.get('mothers')[i]=new_wf
except ValueError:
pass
return new_wf, wf_number | function | python | 99,937 |
public void start(Path inputDir, Path outputDir) throws IOException {
FileSystem fs = FileSystem.get(this.conf);
JobConf uniqueListenersConf = getUniqueListenersJobConf(inputDir);
Path listenersOutputDir = FileOutputFormat.getOutputPath(uniqueListenersConf);
Job listenersJob = new Job(uniqueListenersConf);
if (fs.exists(FileOutputFormat.getOutputPath(uniqueListenersConf))) {
fs.delete(FileOutputFormat.getOutputPath(uniqueListenersConf), true);
}
JobConf sumConf = getSumJobConf(inputDir);
Path sumOutputDir = FileOutputFormat.getOutputPath(sumConf);
Job sumJob = new Job(sumConf);
if (fs.exists(FileOutputFormat.getOutputPath(sumConf))) {
fs.delete(FileOutputFormat.getOutputPath(sumConf), true);
}
ArrayList<Job> mergeDependencies = new ArrayList<Job>();
mergeDependencies.add(listenersJob);
mergeDependencies.add(sumJob);
JobConf mergeConf = getMergeConf(outputDir, sumOutputDir, listenersOutputDir);
Job mergeJob = new Job(mergeConf, mergeDependencies);
if (fs.exists(FileOutputFormat.getOutputPath(mergeConf))) {
fs.delete(FileOutputFormat.getOutputPath(mergeConf), true);
}
List<Path> deletePaths = new ArrayList<Path>();
deletePaths.add(FileOutputFormat.getOutputPath(uniqueListenersConf));
deletePaths.add(FileOutputFormat.getOutputPath(sumConf));
JobControl control = new JobControl("TrackStatisticsProgram");
control.addJob(listenersJob);
control.addJob(sumJob);
control.addJob(mergeJob);
try {
Thread jobControlThread = new Thread(control, "jobcontrol");
jobControlThread.start();
while (!control.allFinished()) {
Thread.sleep(1000);
}
if (control.getFailedJobs().size() > 0) {
throw new IOException("One or more jobs failed");
}
} catch (InterruptedException e) {
throw new IOException("Interrupted while waiting for job control to finish", e);
}
for (Path deletePath : deletePaths) {
fs.delete(deletePath, true);
}
} | function | java | 99,938 |
public class UserDefinedUniverseAlgorithm : QCAlgorithm
{
private static readonly IReadOnlyList<string> Symbols = new List<string>
{
"SPY", "GOOG", "IBM", "AAPL", "MSFT", "CSCO", "ADBE", "WMT",
};
public override void Initialize()
{
UniverseSettings.Resolution = Resolution.Hour;
SetStartDate(2015, 01, 01);
SetEndDate(2015, 12, 01);
AddUniverse("my-universe-name", Resolution.Hour, time =>
{
var hour = time.Hour;
var index = hour%Symbols.Count;
return new List<string> {Symbols[index]};
});
}
public override void OnData(Slice slice)
{
}
public override void OnSecuritiesChanged(SecurityChanges changes)
{
foreach (var removed in changes.RemovedSecurities)
{
if (removed.Invested)
{
Liquidate(removed.Symbol);
}
}
foreach (var added in changes.AddedSecurities)
{
SetHoldings(added.Symbol, 1/(decimal)changes.AddedSecurities.Count);
}
}
} | class | c# | 99,939 |
public void addJoin( String joinField1, String joinField2, Predicate.Op pred) throws ParsingException {
joinField1 = disambiguateName(joinField1);
joinField2 = disambiguateName(joinField2);
String table1Alias = joinField1.split("[.]")[0];
String table2Alias = joinField2.split("[.]")[0];
String pureField1 = joinField1.split("[.]")[1];
String pureField2 = joinField2.split("[.]")[1];
if (table1Alias.equals(table2Alias))
throw new ParsingException("Cannot join on two fields from same table");
LogicalJoinNode lj = new LogicalJoinNode(table1Alias,table2Alias,pureField1, pureField2, pred);
System.out.println("Added join between " + joinField1 + " and " + joinField2);
joins.add(lj);
} | function | java | 99,940 |
def render_hunspell_word_error(
data,
fields=["filename", "word", "line_number", "word_line_index"],
sep=":",
):
values = []
for field in fields:
value = data.get(field)
if value is not None:
values.append(str(value))
return (sep).join(values) | function | python | 99,941 |
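A worked example of the renderer above: only the fields present in data contribute, joined in the order given by fields.
data = {"filename": "docs/readme.md", "word": "recieve", "line_number": 12}
render_hunspell_word_error(data)
# -> "docs/readme.md:recieve:12"  (word_line_index is absent, so it is skipped)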
async def execute(self, query, variables=None):
msg_id = await self.start(query, variables=variables)
try:
resp = await self.receive(wait_id=msg_id)
finally:
await self.receive(wait_id=msg_id)
return resp | function | python | 99,942 |
function updateInventory(arr1, arr2) {
var add;
for (var i = 0; i < arr2.length; i++) {
add = true;
for (var x = 0; x < arr1.length; x++) {
if (arr1[x][1] == arr2[i][1]) {
arr1[x][0] = arr1[x][0] + arr2[i][0];
add = false;
break;
}
}
if (add) {
arr1.push([arr2[i][0], arr2[i][1]]);
}
}
return arr1.sort(function(a, b){
if(a[1] < b[1]) {return -1;}
else if(a[1] > b[1]) {return 1;}
else {return 0;}
});
} | function | javascript | 99,943 |
@Override
public SearchResponse<DatasetSearchResult, DatasetSearchParameter> search(DatasetSearchRequest request) {
SolrQuery solrQuery = queryBuilder.build(request);
QueryResponse solrResp = query(solrQuery);
SearchResponse<DatasetSearchResult, DatasetSearchParameter> resp = responseBuilder.buildSearch(request, solrResp);
return resp;
} | function | java | 99,944 |
def _create_collect_logs_handler(iterations=1, systemd_present=True):
original_file_exists = os.path.exists
def mock_file_exists(filepath):
if filepath == SYSTEMD_RUN_PATH:
return systemd_present
return original_file_exists(filepath)
with mock_wire_protocol(DATA_FILE) as protocol:
protocol_util = MagicMock()
protocol_util.get_protocol = Mock(return_value=protocol)
with patch("azurelinuxagent.ga.collect_logs.get_protocol_util", return_value=protocol_util):
with patch("azurelinuxagent.ga.collect_logs.CollectLogsHandler.stopped", side_effect=[False] * iterations + [True]):
with patch("time.sleep"):
with patch("azurelinuxagent.ga.collect_logs.os.path.exists", side_effect=mock_file_exists):
with patch("azurelinuxagent.ga.collect_logs.conf.get_collect_logs", return_value=True):
def run_and_wait():
collect_logs_handler.run()
collect_logs_handler.join()
collect_logs_handler = get_collect_logs_handler()
collect_logs_handler.get_mock_wire_protocol = lambda: protocol
collect_logs_handler.run_and_wait = run_and_wait
yield collect_logs_handler | function | python | 99,945 |
def create_env_var(session: requests.Session, slug: str, name: str,
value: str, public: bool = False) -> None:
endpoint = f'{_BASE}/repo/{urllib.parse.quote_plus(slug)}/env_vars'
response = session.post(endpoint, json={
'env_var.name': name,
'env_var.value': value,
'env_var.public': public
})
if not response.ok:
raise TravisError(response) | function | python | 99,946 |
private static bool SlowCompare(string str1, string str2)
{
if (str1 == null || str2 == null)
{
throw new InvalidOperationException("The parameter cannot be null");
}
var diff = str1.Length ^ str2.Length;
for (int i = Math.Min(str1.Length, str2.Length) - 1; i >= 0; i--)
{
diff |= str1[i] ^ str2[i];
}
return diff == 0;
} | function | c# | 99,947 |
[TestMethod]
public async Task TestContactRepositoryThrowsExceptionShouldReturnErrorCode()
{
var interactor = new DeclineContactInteractor(new ExceptionContactRepository());
var response = await interactor.ExecuteAsync(
new DeclineContactRequest
{
ContactChatAddress = new Address(Hash.Empty.Value), UserPublicKeyAddress = new Address(Hash.Empty.Value)
});
Assert.AreEqual(ResponseCode.UnkownException, response.Code);
} | function | c# | 99,948 |
private void run_for_n_elaboration_cycles(long n)
{
Arguments.check(n >= 0, "n must be non-negative");
startTopLevelTimers();
stop_soar = false;
reason_for_stopping = null;
long d_cycles_at_start = d_cycle_count.value.get();
int elapsed_cycles = 0;
GoType save_go_type = GoType.GO_PHASE;
elapsed_cycles = -1;
save_go_type = go_type;
go_type = GoType.GO_ELABORATION;
if (d_cycles_at_start == 0)
d_cycles_at_start++;
while (!stop_soar)
{
elapsed_cycles++;
if (n == elapsed_cycles)
break;
do_one_top_level_phase();
}
go_type = save_go_type;
pauseTopLevelTimers();
} | function | java | 99,949 |
def _force_consistent_uids(data_dir: Union[str, PathLike],
target_uid: str = None):
data_dir = pathlib.Path(data_dir).resolve()
for n, fn in enumerate(data_dir.rglob('*dcm')):
img = dcm.read_file(str(fn))
if target_uid is None and n == 0:
target_uid = str(img[('0020', '000d')].value)
continue
img[('0020', '000d')].value = target_uid
dcm.write_file(str(fn), img) | function | python | 99,950 |
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "atomisedName", propOrder = {
"genusOrUninomial",
"infragenericEpithet",
"specificEpithet",
"infraspecificEpithet"
})
public class AtomisedName {
@XmlElement(required = true)
protected String genusOrUninomial;
protected String infragenericEpithet;
protected String specificEpithet;
protected String infraspecificEpithet;
/**
* Gets the value of the genusOrUninomial property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getGenusOrUninomial() {
return genusOrUninomial;
}
/**
* Sets the value of the genusOrUninomial property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setGenusOrUninomial(String value) {
this.genusOrUninomial = value;
}
/**
* Gets the value of the infragenericEpithet property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getInfragenericEpithet() {
return infragenericEpithet;
}
/**
* Sets the value of the infragenericEpithet property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setInfragenericEpithet(String value) {
this.infragenericEpithet = value;
}
/**
* Gets the value of the specificEpithet property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getSpecificEpithet() {
return specificEpithet;
}
/**
* Sets the value of the specificEpithet property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setSpecificEpithet(String value) {
this.specificEpithet = value;
}
/**
* Gets the value of the infraspecificEpithet property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getInfraspecificEpithet() {
return infraspecificEpithet;
}
/**
* Sets the value of the infraspecificEpithet property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setInfraspecificEpithet(String value) {
this.infraspecificEpithet = value;
}
} | class | java | 99,951 |
_onDeviceListChange(devices) {
const newDeviceLists = this._getDefaultDeviceListState();
devices.forEach(({ deviceId, kind, label }) => {
const formatted = {
deviceId,
label,
value: label
};
switch (kind) {
case 'videoinput':
newDeviceLists.cameras.push(formatted);
break;
case 'audioinput':
newDeviceLists.mics.push(formatted);
break;
case 'audiooutput':
newDeviceLists.speakers.push(formatted);
break;
}
});
newDeviceLists.screenshareDongles = [
...newDeviceLists.screenshareDongles,
...this._getFilteredScreenshareDongles(
newDeviceLists.cameras,
this.state.selectedCamera
)
];
this.setState(newDeviceLists);
} | function | javascript | 99,952 |
public void testGetNodesNamePattern()
throws NotExecutableException, RepositoryException {
Node node = testRootNode;
if (!node.hasNodes()) {
throw new NotExecutableException("Workspace does not have sufficient content for this test. " +
"Root node must have at least one child node.");
}
NodeIterator allNodesIt = node.getNodes();
List<Node> allNodes = new ArrayList<Node>();
while (allNodesIt.hasNext()) {
Node n = allNodesIt.nextNode();
allNodes.add(n);
}
String pattern0 = "";
NodeIterator nodes0 = node.getNodes(pattern0);
try {
nodes0.nextNode();
fail("An empty NodeIterator must be returned if pattern does" +
"not match any child node.");
} catch (NoSuchElementException e) {
}
Node firstNode = allNodes.get(0);
String pattern1 = "*";
String assertString1 = "node.getNodes(\"" + pattern1 + "\"): ";
NodeIterator nodes1 = node.getNodes(pattern1);
assertEquals(assertString1 + "number of nodes found: ",
allNodes.size(),
getSize(nodes1));
String pattern2 = firstNode.getName();
String assertString2 = "node.getNodes(\"" + pattern2 + "\"): ";
NodeIterator nodes2 = node.getNodes(pattern2);
while (nodes2.hasNext()) {
Node n = nodes2.nextNode();
assertEquals(assertString2 + "name comparison failed: ",
firstNode.getName(),
n.getName());
}
int numExpected2 = 0;
for (int i = 0; i < allNodes.size(); i++) {
Node n = allNodes.get(i);
if (n.getName().equals(firstNode.getName())) {
numExpected2++;
}
}
nodes2 = node.getNodes(pattern2);
assertEquals(assertString2 + "number of nodes found: ",
numExpected2,
getSize(nodes2));
String pattern3 = firstNode.getName() + "|" + firstNode.getName();
String assertString3 = "node.getNodes(\"" + pattern3 + "\"): ";
NodeIterator nodes3 = node.getNodes(pattern3);
while (nodes3.hasNext()) {
Node n = nodes3.nextNode();
assertEquals(assertString2 + "name comparison failed: ",
firstNode.getName(),
n.getName());
}
int numExpected3 = 0;
for (int i = 0; i < allNodes.size(); i++) {
Node n = allNodes.get(i);
if (n.getName().equals(firstNode.getName())) {
numExpected3++;
}
}
nodes3 = node.getNodes(pattern3);
assertEquals(assertString3 + "number of nodes found: ",
numExpected3,
getSize(nodes3));
if (firstNode.getName().length() > 2) {
String name = firstNode.getName();
String shortenName = name.substring(1, name.length() - 1);
String pattern4 = "*" + shortenName + "*";
String assertString4 = "node.getNodes(\"" + pattern4 + "\"): ";
NodeIterator nodes4 = node.getNodes(pattern4);
while (nodes4.hasNext()) {
Node n = nodes4.nextNode();
assertTrue(assertString4 + "name comparison failed: *" +
shortenName + "* not found in " + n.getName(),
n.getName().indexOf(shortenName) != -1);
}
int numExpected4 = 0;
for (int i = 0; i < allNodes.size(); i++) {
Node n = allNodes.get(i);
if (n.getName().indexOf(shortenName) != -1) {
numExpected4++;
}
}
nodes4 = node.getNodes(pattern4);
assertEquals(assertString4 + "number of nodes found: ",
numExpected4,
getSize(nodes4));
}
} | function | java | 99,953 |
def diff(
self, n=1, prepend: List = [], append: List = [], new_name: Optional[str] = None
) -> "HTable":
cat = prepend + append
if len(cat) > n or n > len(cat) > 0:
raise ValueError(
f"You must prepend or append exactly n ({n}) values or 0 values"
)
if len(cat) == 0:
prepend = [self[0][self.values_column]] * n
kwargs = {}
if prepend:
kwargs["prepend"] = prepend
if append:
kwargs["append"] = append
diff = partial(np.diff, n=n, **kwargs)
return self.apply(self.values_column, diff, new_name) | function | python | 99,954 |
filterAllJournalposts() {
this.filteredJournalPosts = this.journalposts.filter(
(journalpost) =>
(this.availableThemes == null || this.availableThemes.includes(journalpost.tema)) &&
(this.selectedCase == null ||
this.selectedCase === journalpost.sak.fagsakId ||
(this.selectedCase === 'general' && 'GENERELL_SAK' == journalpost.sak.sakstype)) &&
(this.selectedThemeCode == null || this.selectedThemeCode === journalpost.tema) &&
this.selectedJornalpostTypes.includes(journalpost.journalposttype)
);
} | function | javascript | 99,955 |
@GuardedBy("monitor")
private void serviceFinishedStarting(Service service, boolean currentlyHealthy) {
checkState(unstartedServices > 0,
"All services should have already finished starting but %s just finished.", service);
unstartedServices--;
if (currentlyHealthy && unstartedServices == 0 && unstoppedServices == numberOfServices) {
for (final ListenerExecutorPair pair : listeners) {
queuedListeners.add(new Runnable() {
@Override public void run() {
pair.execute(new Runnable() {
@Override public void run() {
pair.listener.healthy();
}
});
}
});
}
}
} | function | java | 99,956 |
def sync(self, session=None) -> None:
with self.connector as connection:
_, stdout, stderr = connection.exec_command("source /etc/profile.d/lsf.sh && bjobs -aW")
for bjob in [RemoteLsfExecutor._parse_bjob(line) for line in stdout if line[0].isdigit()]:
if bjob.lsf_id in self.bjob_to_task:
task_id = self.bjob_to_task[bjob.lsf_id]
if bjob.running or bjob.finished or bjob.failed:
ti = self.runnings_tasks[task_id]
ti.refresh_from_db(session=session, lock_for_update=True)
if ti.state == State.QUEUED:
ti.state = State.RUNNING
ti.start_date = datetime.now(tz=timezone.utc)
session.merge(ti)
session.commit()
self.change_state(task_id, State.RUNNING)
if bjob.finished and ti.state not in [State.SUCCESS, State.FAILED, State.SHUTDOWN]:
del self.task_to_bjob[task_id]
del self.bjob_to_task[bjob.lsf_id]
del self.runnings_tasks[task_id]
ti.state = State.SUCCESS
ti.end_date = datetime.now(tz=timezone.utc)
session.merge(ti)
session.commit()
elif bjob.failed:
del self.task_to_bjob[task_id]
del self.bjob_to_task[bjob.lsf_id]
del self.runnings_tasks[task_id]
self.fail(task_id) | function | python | 99,957 |
private int[] pickForcedLocalAddress() {
int[] ret = null;
String aux = System.getProperty(FTPKeys.ACTIVE_DT_HOST_ADDRESS);
if (aux != null) {
boolean valid = false;
StringTokenizer st = new StringTokenizer(aux, ".");
if (st.countTokens() == 4) {
valid = true;
int[] arr = new int[4];
for (int i = 0; i < 4; i++) {
String tk = st.nextToken();
try {
arr[i] = Integer.parseInt(tk);
} catch (NumberFormatException e) {
arr[i] = -1;
}
if (arr[i] < 0 || arr[i] > 255) {
valid = false;
break;
}
}
if (valid) {
ret = arr;
}
}
if (!valid) {
System.err.println("WARNING: invalid value \"" + aux
+ "\" for the " + FTPKeys.ACTIVE_DT_HOST_ADDRESS
+ " system property. The value should "
+ "be in the x.x.x.x form.");
}
}
return ret;
} | function | java | 99,958 |
class GridProEditColumn extends GridColumn {
static get is() {
return 'vaadin-grid-pro-edit-column';
}
static get properties() {
return {
/**
* Custom function for rendering the cell content in edit mode.
* Receives three arguments:
*
* - `root` The cell content DOM element. Append your editor component to it.
* - `column` The `<vaadin-grid-pro-edit-column>` element.
* - `model` The object with the properties related with
* the rendered item, contains:
* - `model.index` The index of the item.
* - `model.item` The item.
* - `model.expanded` Sublevel toggle state.
* - `model.level` Level of the tree represented with a horizontal offset of the toggle button.
* - `model.selected` Selected state.
* - `model.detailsOpened` Details opened state.
* @type {!GridBodyRenderer | null | undefined}
*/
editModeRenderer: Function,
/**
* The list of options which should be passed to cell editor component.
* Used with the `select` editor type, to provide a list of items.
* @type {!Array<string>}
*/
editorOptions: {
type: Array,
value: () => []
},
/**
* Type of the cell editor component to be rendered. Allowed values:
* - `text` (default) - renders a text field
* - `checkbox` - renders a checkbox
* - `select` - renders a select with a list of items passed as `editorOptions`
*
* Editor type is set to `custom` when `editModeRenderer` is set.
* @attr {text|checkbox|select|custom} editor-type
* @type {!GridProEditorType}
*/
editorType: {
type: String,
notify: true, // FIXME(web-padawan): needed by Flow counterpart
value: 'text'
},
/**
* Path of the property used for the value of the editor component.
* @attr {string} editor-value-path
* @type {string}
*/
editorValuePath: {
type: String,
value: 'value'
},
/**
* JS Path of the property in the item used for the editable content.
*/
path: {
type: String,
observer: '_pathChanged'
},
/** @private */
_oldRenderer: Function
};
}
static get observers() {
return ['_editModeRendererChanged(editModeRenderer, __initialized)', '_cellsChanged(_cells.*)'];
}
constructor() {
super();
this.__editModeRenderer = function (root, column) {
const cell = root.assignedSlot.parentNode;
const tagName = column._getEditorTagName(cell);
if (!root.firstElementChild || root.firstElementChild.localName.toLowerCase() !== tagName) {
root.innerHTML = `
<${tagName}></${tagName}>
`;
}
};
}
/** @private */
_pathChanged(path) {
if (!path || path.length == 0) {
throw new Error('You should specify the path for the edit column');
}
}
/** @private */
_cellsChanged() {
this._cells.forEach((cell) => {
const part = cell.getAttribute('part');
if (part.indexOf('editable-cell') < 0) {
cell.setAttribute('part', part + ' editable-cell');
}
});
}
/** @private */
_editModeRendererChanged(renderer) {
if (renderer) {
this.editorType = 'custom';
} else if (this._oldRenderer) {
this.editorType = 'text';
}
this._oldRenderer = renderer;
}
/**
* @param {!HTMLElement} cell
* @return {string}
* @protected
*/
_getEditorTagName(cell) {
return this.editorType === 'custom' ? this._getEditorComponent(cell).localName : this._getTagNameByType();
}
/**
* @param {!HTMLElement} cell
* @return {HTMLElement | null}
* @protected
*/
_getEditorComponent(cell) {
return this.editorType === 'custom'
? cell._content.firstElementChild
: cell._content.querySelector(this._getEditorTagName(cell));
}
/** @private */
_getTagNameByType() {
let type;
switch (this.editorType) {
case 'checkbox':
type = 'checkbox';
break;
case 'select':
type = 'select';
break;
case 'text':
default:
type = 'text-field';
break;
}
return this.constructor.is.replace('column', type);
}
/** @private */
_focusEditor(editor) {
editor.focus();
if (this.editorType === 'checkbox') {
editor.setAttribute('focus-ring', '');
} else if (editor instanceof HTMLInputElement) {
editor.select();
} else if (editor.focusElement && editor.focusElement instanceof HTMLInputElement) {
editor.focusElement.select();
}
}
/**
* @param {!HTMLElement} editor
* @return {unknown}
* @protected
*/
_getEditorValue(editor) {
const path = this.editorType === 'checkbox' ? 'checked' : this.editorValuePath;
return get(editor, path);
}
/** @private */
_renderEditor(cell, model) {
cell.__savedRenderer = this._renderer || cell._renderer;
cell._renderer = this.editModeRenderer || this.__editModeRenderer;
this._clearCellContent(cell);
this._runRenderer(cell._renderer, cell, model);
}
/** @private */
_removeEditor(cell, _model) {
if (!cell.__savedRenderer) return;
cell._renderer = cell.__savedRenderer;
cell.__savedRenderer = undefined;
this._clearCellContent(cell);
const row = cell.parentElement;
this._grid._updateItem(row, row._item);
}
/** @private */
_setEditorOptions(editor) {
if (this.editorOptions && this.editorOptions.length) {
editor.options = this.editorOptions;
}
}
/** @private */
_setEditorValue(editor, value) {
const path = this.editorType === 'checkbox' ? 'checked' : this.editorValuePath;
// FIXME(yuriy): Required for the flow counterpart as it is passing the string value to webcomponent
value = this.editorType === 'checkbox' && typeof value === 'string' ? value == 'true' : value;
set(editor, path, value);
editor.notifyPath && editor.notifyPath(path, value);
}
/**
* @param {!HTMLElement} cell
* @param {!GridItemModel} model
* @protected
*/
_startCellEdit(cell, model) {
this._renderEditor(cell, model);
const editor = this._getEditorComponent(cell);
editor.addEventListener('focusout', this._grid.__boundEditorFocusOut);
editor.addEventListener('focusin', this._grid.__boundEditorFocusIn);
editor.addEventListener('internal-tab', this._grid.__boundCancelCellSwitch);
document.body.addEventListener('focusin', this._grid.__boundGlobalFocusIn);
this._setEditorOptions(editor);
this._setEditorValue(editor, get(model.item, this.path));
editor._grid = this._grid;
this._focusEditor(editor);
}
/**
* @param {!HTMLElement} cell
* @param {!GridItemModel} model
* @protected
*/
_stopCellEdit(cell, model) {
document.body.removeEventListener('focusin', this._grid.__boundGlobalFocusIn);
this._removeEditor(cell, model);
}
} | class | javascript | 99,959 |
@Getter @Setter @ToString @EqualsAndHashCode
@Accessors(fluent = true)
public class ConfigsBuilder {
/**
* Either the name of the stream to consume records from
* Or MultiStreamTracker for all the streams to consume records from
*/
private Either<MultiStreamTracker, String> appStreamTracker;
/**
* Application name for the KCL Worker
*/
@NonNull
private final String applicationName;
/**
* KinesisClient to be used to consumer records from Kinesis
*/
@NonNull
private final KinesisAsyncClient kinesisClient;
/**
* DynamoDBClient to be used to interact with DynamoDB service for lease management and checkpoiniting
*/
@NonNull
private final DynamoDbAsyncClient dynamoDBClient;
/**
* CloudWatchClient to be used to push KCL metrics to CloudWatch service
*/
@NonNull
private final CloudWatchAsyncClient cloudWatchClient;
/**
* KCL worker identifier to distinguish between 2 unique workers
*/
@NonNull
private final String workerIdentifier;
/**
* ShardRecordProcessorFactory to be used to create ShardRecordProcesor for processing records
*/
@NonNull
private final ShardRecordProcessorFactory shardRecordProcessorFactory;
/**
* Lease table name used for lease management and checkpointing.
*/
private String tableName;
/**
* Lease table name used for lease management and checkpointing.
*
* @return DynamoDB table name
*/
public String tableName() {
if (StringUtils.isEmpty(tableName)) {
tableName = applicationName();
}
return tableName;
}
/**
* CloudWatch namespace for KCL metrics.
*/
private String namespace;
/**
* CloudWatch namespace for KCL metrics.
*
* @return CloudWatch namespace
*/
public String namespace() {
if (StringUtils.isEmpty(namespace)) {
namespace = applicationName();
}
return namespace;
}
/**
* Constructor to initialize ConfigsBuilder with StreamName
* @param streamName
* @param applicationName
* @param kinesisClient
* @param dynamoDBClient
* @param cloudWatchClient
* @param workerIdentifier
* @param shardRecordProcessorFactory
*/
public ConfigsBuilder(@NonNull String streamName, @NonNull String applicationName,
@NonNull KinesisAsyncClient kinesisClient, @NonNull DynamoDbAsyncClient dynamoDBClient,
@NonNull CloudWatchAsyncClient cloudWatchClient, @NonNull String workerIdentifier,
@NonNull ShardRecordProcessorFactory shardRecordProcessorFactory) {
this.appStreamTracker = Either.right(streamName);
this.applicationName = applicationName;
this.kinesisClient = kinesisClient;
this.dynamoDBClient = dynamoDBClient;
this.cloudWatchClient = cloudWatchClient;
this.workerIdentifier = workerIdentifier;
this.shardRecordProcessorFactory = shardRecordProcessorFactory;
}
/**
* Constructor to initialize ConfigsBuilder with MultiStreamTracker
* @param multiStreamTracker
* @param applicationName
* @param kinesisClient
* @param dynamoDBClient
* @param cloudWatchClient
* @param workerIdentifier
* @param shardRecordProcessorFactory
*/
public ConfigsBuilder(@NonNull MultiStreamTracker multiStreamTracker, @NonNull String applicationName,
@NonNull KinesisAsyncClient kinesisClient, @NonNull DynamoDbAsyncClient dynamoDBClient,
@NonNull CloudWatchAsyncClient cloudWatchClient, @NonNull String workerIdentifier,
@NonNull ShardRecordProcessorFactory shardRecordProcessorFactory) {
this.appStreamTracker = Either.left(multiStreamTracker);
this.applicationName = applicationName;
this.kinesisClient = kinesisClient;
this.dynamoDBClient = dynamoDBClient;
this.cloudWatchClient = cloudWatchClient;
this.workerIdentifier = workerIdentifier;
this.shardRecordProcessorFactory = shardRecordProcessorFactory;
}
/**
* Creates a new instance of CheckpointConfig
*
* @return CheckpointConfig
*/
public CheckpointConfig checkpointConfig() {
return new CheckpointConfig();
}
/**
* Creates a new instance of CoordinatorConfig
*
* @return CoordinatorConfig
*/
public CoordinatorConfig coordinatorConfig() {
return new CoordinatorConfig(applicationName());
}
/**
* Creates a new instance of LeaseManagementConfig
*
* @return LeaseManagementConfig
*/
public LeaseManagementConfig leaseManagementConfig() {
return new LeaseManagementConfig(tableName(), dynamoDBClient(), kinesisClient(), workerIdentifier());
}
/**
* Creates a new instance of LifecycleConfig
*
* @return LifecycleConfig
*/
public LifecycleConfig lifecycleConfig() {
return new LifecycleConfig();
}
/**
* Creates a new instance of MetricsConfig
*
* @return MetricsConfig
*/
public MetricsConfig metricsConfig() {
return new MetricsConfig(cloudWatchClient(), namespace());
}
/**
* Creates a new instance of ProcessorConfig
*
* @return ProcessorConfigConfig
*/
public ProcessorConfig processorConfig() {
return new ProcessorConfig(shardRecordProcessorFactory());
}
/**
* Creates a new instance of RetrievalConfig
*
* @return RetrievalConfig
*/
public RetrievalConfig retrievalConfig() {
final RetrievalConfig retrievalConfig =
appStreamTracker.map(
multiStreamTracker -> new RetrievalConfig(kinesisClient(), multiStreamTracker, applicationName()),
streamName -> new RetrievalConfig(kinesisClient(), streamName, applicationName()));
return retrievalConfig;
}
} | class | java | 99,960 |
func (cc *ConnectionCache) CleanOld(older_than time.Duration) {
cc.mx.Lock()
defer cc.mx.Unlock()
for h, cls := range cc.cache {
		if cls == nil || cls.Len() == 0 {
			// Drop nil or empty client lists and skip them; calling Front() on a nil list below would panic.
			delete(cc.cache, h)
			continue
		}
for cl := cls.Front(); cl != nil; cl = cl.Next() {
if time.Now().Sub(cl.Value.(*Client).last_sent) > older_than {
cl.Value.(*Client).Destroy()
cls.Remove(cl)
}
}
}
} | function | go | 99,961 |
public abstract class AbstractDate {
public final static String DAY = "day";
public final static String IS_OUT_OF_RANGE = "is out of range!";
public final static String NOT_IMPLEMENTED_YET = "not implemented yet!";
public final static String MONTH = "month";
public final static String YEAR_0_IS_INVALID = "Year 0 is invalid!";
public void setDate(int year, int month, int day) {
setYear(year);
setMonth(month);
setDayOfMonth(day);
}
public abstract int getYear();
public abstract void setYear(int year);
public abstract int getMonth();
public abstract void setMonth(int month);
public abstract int getDayOfMonth();
public abstract void setDayOfMonth(int day);
public abstract int getDayOfWeek();
public abstract int getDayOfYear();
public abstract int getWeekOfYear();
public abstract int getWeekOfMonth();
public abstract void rollDay(int amount, boolean up);
public abstract void rollMonth(int amount, boolean up);
public abstract void rollYear(int amount, boolean up);
/**
* Returns a string specifying the event of this date, or null if there are
* no events for this year.
*/
public abstract String getEvent();
public abstract boolean isLeapYear();
public abstract AbstractDate clone();
public static class DayOutOfRangeException extends RuntimeException {
private static final long serialVersionUID = -9053871584605015203L;
public DayOutOfRangeException() {
super();
}
public DayOutOfRangeException(String arg0) {
super(arg0);
}
}
public static class YearOutOfRangeException extends RuntimeException {
private static final long serialVersionUID = -9154217686200590192L;
public YearOutOfRangeException() {
super();
}
public YearOutOfRangeException(String arg0) {
super(arg0);
}
}
public static class MonthOutOfRangeException extends RuntimeException {
private static final long serialVersionUID = 1871328381608677472L;
public MonthOutOfRangeException() {
super();
}
public MonthOutOfRangeException(String arg0) {
super(arg0);
}
}
} | class | java | 99,962 |
private void checkCryptoWalletBalance() throws CantLoadWalletException, CantCalculateBalanceException, CryptoWalletBalanceInsufficientException {
if (this.bitcoinWalletBalance == null) {
BitcoinWalletWallet bitcoinWalletWallet = this.bitcoinWalletManager.loadWallet(this.walletPublicKey);
this.bitcoinWalletBalance = bitcoinWalletWallet.getBalance(BalanceType.AVAILABLE);
}
long bitcoinWalletAvailableBalance = bitcoinWalletBalance.getBalance();
long digitalAssetGenesisAmount = this.digitalAsset.getGenesisAmount();
if (digitalAssetGenesisAmount > bitcoinWalletAvailableBalance) {
throw new CryptoWalletBalanceInsufficientException("The current balance in Wallet " + this.walletPublicKey + " is " + bitcoinWalletAvailableBalance + " the amount needed is " + digitalAssetGenesisAmount);
}
} | function | java | 99,963 |
[System.Diagnostics.CodeAnalysis.SuppressMessage(
"Microsoft.Design",
"CA1021:AvoidOutParameters",
MessageId = "2#",
Justification = "We allow this in a 'Try' method.")]
bool TryCreate(
ConfigurationBehaviorType behaviorType,
ILifetimeDetails lifetimeDetails,
out Form behaviorForm,
out ConfigurationBehaviorFormViewModel behaviorFormViewModel); | function | c# | 99,964 |
def _match_indicators(gt_is_present, pr_is_present,
match_pairs, match_occurs):
num_gt, num_frames = gt_is_present.shape
num_pr, _ = pr_is_present.shape
match_occurs = match_occurs.astype(bool)
match_indicators = {}
match_indicators['occurs'] = match_occurs
gt_has_some_match = np.zeros([num_gt, num_frames], dtype=bool)
pr_has_some_match = np.zeros([num_pr, num_frames], dtype=bool)
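    # For each ground-truth and predicted track, mark the frames in which it
    # participates in at least one matched pair.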
for (gt_id, pr_id), pair_match_occurs in zip(match_pairs, match_occurs):
gt_has_some_match[gt_id] |= pair_match_occurs
pr_has_some_match[pr_id] |= pair_match_occurs
match_gt_is_present = gt_is_present[match_pairs[:, 0]]
match_pr_is_present = pr_is_present[match_pairs[:, 1]]
match_gt_is_alone = (match_gt_is_present & ~match_pr_is_present)
match_pr_is_alone = (match_pr_is_present & ~match_gt_is_present)
match_indicators['either_is_present'] = np.logical_or(
match_gt_is_present, match_pr_is_present)
match_indicators['gt_is_alone_with_match'] = (
match_gt_is_alone & gt_has_some_match[match_pairs[:, 0]])
match_indicators['gt_is_alone_sans_match'] = (
match_gt_is_alone & ~gt_has_some_match[match_pairs[:, 0]])
match_indicators['pr_is_alone_with_match'] = (
match_pr_is_alone & pr_has_some_match[match_pairs[:, 1]])
match_indicators['pr_is_alone_sans_match'] = (
match_pr_is_alone & ~pr_has_some_match[match_pairs[:, 1]])
return match_indicators | function | python | 99,965 |
def wait_for_answer(self, device, request_message, timeout=30, block=True):
    if device is None:
        raise ValueError("device must not be None")
    if request_message is None:
        raise ValueError("request_message must not be None")
    self.wait_api_lock.acquire()
    wait_start_time = time.perf_counter()
message = None
if timeout:
response_good: bool = False
while response_good is False:
try:
queue_poll_time_sec: float = 0.1
message = self.response_queue.get(
block=block, timeout=queue_poll_time_sec
)
if str(message.gw_id) == str(device):
if int(message.req_id) == int(
request_message["data"].req_id
):
response_good = True
else:
pass
else:
pass
if self.request_queue.empty():
default_sleep_time: float = 0.1
time.sleep(default_sleep_time)
except queue.Empty:
pass
if time.perf_counter() - wait_start_time > timeout:
print(
"Error got no reply for {} in time. "
"Time waited {:.0f} secs.".format(
device, time.perf_counter() - wait_start_time
)
)
message = None
break
self.wait_api_lock.release()
return message | function | python | 99,966 |
@Test
public void databaseStorageInMemoryDB() {
Log.i(TAG, "Testing Database Storage switch over to in-memory database");
/* Get instance to access database. */
DatabaseStorage databaseStorage = DatabaseStorage.getDatabaseStorage("test-databaseStorageInMemoryDB", "test.databaseStorageInMemoryDB", 1, mSchema, new DatabaseManager.Listener() {
@Override
public boolean onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
return false;
}
@Override
public void onError(String operation, RuntimeException e) {
/* Do not handle any errors. This is simulating errors so this is expected. */
}
});
try {
runDatabaseStorageTest(databaseStorage, true);
} finally {
/* Close. */
databaseStorage.close();
}
} | function | java | 99,967 |
func (mng *Manager) correlate(r *base.Response) {
tx, ok := mng.getTx(r)
if !ok {
log.Warn("Failed to correlate response to active transaction. Dropping it.")
return
}
tx.Receive(r)
} | function | go | 99,968 |
private class CompilerResultConsumer implements IConsumer<CompilerResult>, Serializable {
/**
* the compilerMarkersContainer
*/
private final WebMarkupContainer compilerMarkersContainer;
/**
* Constructor.
* @param compilerMarkersContainer the container for the markers list
*/
public CompilerResultConsumer(final WebMarkupContainer compilerMarkersContainer) {
this.compilerMarkersContainer = compilerMarkersContainer;
}
/* (non-Javadoc)
* @see name.martingeisse.common.terms.IConsumer#consume(java.lang.Object)
*/
@Override
public void consume(final CompilerResult compilerResult) {
setCompilerMarkersFromResult(compilerResult);
AjaxRequestUtil.markForRender(compilerMarkersContainer);
}
} | class | java | 99,969 |
public void writeDiffAsXMLFile(String filePath, String pathToXSLT) {
    if (pathToXSLT == null || pathToXSLT.isEmpty()) {
pathToXSLT = "bubastis_style_info.xslt";
}
try {
XMLRenderer xmlRenderer = new XMLRenderer();
xmlRenderer.writeDiffAsXMLFile(filePath, this.changeBean, pathToXSLT);
} catch (IOException e) {
System.out.println("An error occurred when attempt to write the diff as XML to a file at: " + filePath);
e.printStackTrace();
}
} | function | java | 99,970 |
func (h *Hash) HKDF(secret, salt, info []byte, length int) []byte {
if length == 0 {
length = h.OutputSize()
}
kdf := hkdf.New(h.f, secret, salt, info)
dst := make([]byte, length)
_, _ = io.ReadFull(kdf, dst)
return dst
} | function | go | 99,971 |
func mapRunesToClusterIndices(runes []rune, glyphs []shaping.Glyph) []int {
mapping := make([]int, len(runes))
glyphCursor := 0
if len(runes) == 0 {
return nil
}
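	// Glyphs are in visual order; if the last glyph's cluster index is lower than
	// the first's, the run is right-to-left.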
rtl := len(glyphs) > 0 && glyphs[len(glyphs)-1].ClusterIndex < glyphs[0].ClusterIndex
if rtl {
glyphCursor = len(glyphs) - 1
}
for i := range runes {
for glyphCursor >= 0 && glyphCursor < len(glyphs) &&
((rtl && glyphs[glyphCursor].ClusterIndex <= i) ||
(!rtl && glyphs[glyphCursor].ClusterIndex < i)) {
if rtl {
glyphCursor--
} else {
glyphCursor++
}
}
if rtl {
glyphCursor++
} else if (glyphCursor >= 0 && glyphCursor < len(glyphs) &&
glyphs[glyphCursor].ClusterIndex > i) ||
(glyphCursor == len(glyphs) && len(glyphs) > 1) {
glyphCursor--
targetClusterIndex := glyphs[glyphCursor].ClusterIndex
for glyphCursor-1 >= 0 && glyphs[glyphCursor-1].ClusterIndex == targetClusterIndex {
glyphCursor--
}
}
if glyphCursor < 0 {
glyphCursor = 0
} else if glyphCursor >= len(glyphs) {
glyphCursor = len(glyphs) - 1
}
mapping[i] = glyphCursor
}
return mapping
} | function | go | 99,972 |
def reset_as_parent(self):
self.identifier.pop()
self.scope_depth = self.scope_depth - 1
self._set_pattern_id()
self._find_parent_module()
self.module_name = Scope.scope_to_module_name(self.identifier)
self._node_structs = list()
self._module_structs = list()
self._fragment = None
self._args_translator = None
self.init_args_translator()
self._setting = None
self._parent_module_struct = None
self._nodes_structs_formal_args_list = list()
self._node_args_translation_list = list() | function | python | 99,973 |
static class CommandId
{
public const int icmdSccCommand = 0x101;
public const int icmdViewToolWindow = 0x102;
public const int icmdToolWindowToolbarCommand = 0x103;
public const int imnuToolWindowToolbarMenu = 0x200;
public const int iiconProductIcon = 400;
public const int ibmpToolWindowsImages = 501;
public const int iconSccProviderToolWindow = 0;
} | class | c# | 99,974 |
public class Notification {
private String payload;
private String token;
private UUID uuid;
private int expiration;
/**
* Default priority is set to be 10
*/
private int priority = Constants.HIGH_PRIORITY;
private String topic;
private String collapseId;
/**
* Constructor always takes valid payload
* @param payload
*/
public Notification(String payload) {
this.payload = payload;
}
public String getPayload() {
return payload;
}
public Notification setPayload(String payload) {
this.payload = payload;
return this;
}
public String getToken() {
return token;
}
public Notification setToken(String token) {
this.token = token;
return this;
}
public UUID getUuid() {
return uuid;
}
    /**
     * It is not required to set the UUID;
     * APNs automatically generates one if this field is empty.
     * @return this notification with a newly generated random UUID
     */
public Notification setUuid() {
this.uuid = UUID.randomUUID();
return this;
}
public int getExpiration() {
return expiration;
}
public Notification setExpiration(int expiration) {
this.expiration = expiration;
return this;
}
public int getPriority() {
return priority;
}
public Notification setPriority(int priority) {
this.priority = priority;
return this;
}
public String getTopic() {
return topic;
}
public Notification setTopic(String topic) {
this.topic = topic;
return this;
}
public String getCollapseId() {
return collapseId;
}
public Notification setCollapseId(String collapseId) {
this.collapseId = collapseId;
return this;
}
} | class | java | 99,975 |
@Test
public void testMarkerMapping() {
setTestFile("short-sample-drop.flextext");
setProperties("properties/map-annotation.properties");
start();
SDocumentGraph graph = getFixture().getSaltProject().getCorpusGraphs().get(0).getDocuments().get(0)
.getDocumentGraph();
assertNotNull(graph);
/*
* Check that annotations have been dropped according
* to property
*/
for (SNode node : graph.getNodes()) {
for (SLayer l : node.getLayers()) {
if (l.getName().equals("morph")) {
assertThat(node.getAnnotation("en::morphenhn"), is(notNullValue()));
assertThat(node.getAnnotation("fr::frgls"), is(notNullValue()));
assertThat(node.getAnnotation("fr::morphdro"), is(notNullValue()));
assertThat(node.getAnnotation("en::morphdro"), is(notNullValue()));
assertThat(node.getAnnotation("en::yyy"), is(notNullValue()));
}
else if (l.getName().equals("phrase")) {
assertThat(node.getAnnotation("fr::frgls"), is(notNullValue()));
assertThat(node.getAnnotation("de::yyy"), is(notNullValue()));
}
else if (l.getName().equals("word")) {
assertThat(node.getAnnotation("fr::yyy"), is(notNullValue()));
}
}
}
} | function | java | 99,976 |
public class DoubleLabel implements PropertyRenderer {
public Component getRendererComponent(final Property property, Annotation annotation) {
final ValueModel model = new PropertyAdapter(property, "value", true);
JPanel panel = new JPanel(new BorderLayout());
final JLabel label = new JLabel();
final DecimalFormat format = new DecimalFormat("#.###");
model.addValueChangeListener(new PropertyChangeListener() {
public void propertyChange(PropertyChangeEvent evt) {
label.setText(format.format(property.getValue()));
}
});
panel.addPropertyChangeListener(new PropertyChangeListener() {
public void propertyChange(PropertyChangeEvent evt) {
if ("foreground".equals(evt.getPropertyName())) {
label.setForeground((Color) evt.getNewValue());
} else if ("background".equals(evt.getPropertyName())) {
label.setBackground((Color) evt.getNewValue());
}
}
});
panel.add(label, BorderLayout.CENTER);
return panel;
// return BasicComponentFactory.createLabel(model, new DecimalFormat("#.###"));
}
} | class | java | 99,977 |
@Component("blSkuRestrictionFactory")
public class SkuRestrictionFactoryImpl implements RestrictionFactory {
@Resource(name="blRestrictionFactory")
protected RestrictionFactory delegate;
protected static final String DEFAULT_SKU_PATH_PREFIX = "product.defaultSku.";
protected String skuPropertyPrefix;
@Override
public Restriction getRestriction(final String type, String propertyId) {
final Restriction delegateRestriction = delegate.getRestriction(type, propertyId);
return new Restriction()
.withFilterValueConverter(delegateRestriction.getFilterValueConverter())
.withPredicateProvider(new PredicateProvider() {
@Override
public Predicate buildPredicate(CriteriaBuilder builder, FieldPathBuilder fieldPathBuilder,
From root, String ceilingEntity, String fullPropertyName,
Path explicitPath, List directValues) {
FieldPath fieldPath = fieldPathBuilder.getFieldPath(root, fullPropertyName);
if ((StringUtils.isNotEmpty(skuPropertyPrefix) && fullPropertyName.startsWith(skuPropertyPrefix))
|| CollectionUtils.isEmpty(fieldPath.getAssociationPath())) {
Path targetPropertyPath = fieldPathBuilder.getPath(root, fieldPath, builder);
Path defaultSkuPropertyPath = fieldPathBuilder.getPath(root,
DEFAULT_SKU_PATH_PREFIX + fullPropertyName, builder);
Path productPath = fieldPathBuilder.getPath(root, "product", builder);
Predicate propertyExpression;
Predicate defaultSkuExpression;
if (delegateRestriction.getPredicateProvider() instanceof LikePredicateProvider) {
propertyExpression = builder.like(builder.lower(targetPropertyPath),
((String) directValues.get(0)).toLowerCase());
defaultSkuExpression = builder.like(builder.lower(defaultSkuPropertyPath),
((String) directValues.get(0)).toLowerCase());
} else if (delegateRestriction.getPredicateProvider() instanceof IsNullPredicateProvider) {
propertyExpression = builder.isNull(targetPropertyPath);
defaultSkuExpression = builder.isNull(defaultSkuPropertyPath);
} else if (delegateRestriction.getPredicateProvider() instanceof BetweenDatePredicateProvider) {
if (directValues.size() == 2) {
if (directValues.get(0) == null) {
propertyExpression = builder.lessThan(targetPropertyPath, (Comparable) directValues.get(1));
defaultSkuExpression = builder.lessThan(defaultSkuPropertyPath, (Comparable) directValues.get(1));
} else if (directValues.get(1) == null) {
propertyExpression = builder.greaterThanOrEqualTo(targetPropertyPath,
(Comparable) directValues.get(0));
defaultSkuExpression = builder.greaterThanOrEqualTo(defaultSkuPropertyPath,
(Comparable) directValues.get(0));
} else {
propertyExpression = builder.between(targetPropertyPath, (Comparable) directValues.get(0),
(Comparable) directValues.get(1));
defaultSkuExpression = builder.between(defaultSkuPropertyPath, (Comparable) directValues.get(0),
(Comparable) directValues.get(1));
}
} else {
propertyExpression = builder.equal(targetPropertyPath, directValues.get(0));
defaultSkuExpression = builder.equal(defaultSkuPropertyPath, directValues.get(0));
}
} else if (delegateRestriction.getPredicateProvider() instanceof BetweenPredicateProvider) {
if (directValues.size() > 1) {
propertyExpression = builder.between(targetPropertyPath, (Comparable) directValues.get(0),
(Comparable) directValues.get(1));
defaultSkuExpression = builder.between(defaultSkuPropertyPath, (Comparable) directValues.get(0),
(Comparable) directValues.get(1));
} else {
propertyExpression = builder.equal(targetPropertyPath, directValues.get(0));
defaultSkuExpression = builder.equal(defaultSkuPropertyPath, directValues.get(0));
}
} else if (delegateRestriction.getPredicateProvider() instanceof CollectionSizeEqualPredicateProvider) {
propertyExpression = builder.equal(builder.size(targetPropertyPath), directValues.get(0));
defaultSkuExpression = builder.equal(builder.size(defaultSkuPropertyPath), directValues.get(0));
} else if (delegateRestriction.getPredicateProvider() instanceof EqPredicateProvider) {
propertyExpression = targetPropertyPath.in(directValues);
defaultSkuExpression = defaultSkuPropertyPath.in(directValues);
} else {
throw new IllegalArgumentException("Unknown PredicateProvider instance: " +
delegateRestriction.getPredicateProvider().getClass().getName());
}
return buildCompositePredicate(builder, targetPropertyPath, productPath, propertyExpression,
defaultSkuExpression);
}
return delegateRestriction.getPredicateProvider().buildPredicate(builder, fieldPathBuilder, root,
ceilingEntity, fullPropertyName, explicitPath, directValues);
}
});
}
protected Predicate buildCompositePredicate(CriteriaBuilder builder, Path targetPropertyPath, Path productPath,
Predicate propertyExpression, Predicate defaultSkuExpression) {
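        // Match when: (1) the SKU property is non-null and matches; (2) the SKU property is null but a
        // product exists and its default SKU matches; or (3) there is no product and the SKU property matches.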
return builder.or(
builder.or(
builder.and(builder.isNotNull(targetPropertyPath), propertyExpression),
builder.and(
builder.and(
builder.isNull(targetPropertyPath),
builder.isNotNull(productPath)
), defaultSkuExpression
)
), builder.and(builder.isNull(productPath), propertyExpression)
);
}
public RestrictionFactory getDelegate() {
return delegate;
}
public void setDelegate(RestrictionFactory delegate) {
this.delegate = delegate;
}
public String getSkuPropertyPrefix() {
return skuPropertyPrefix;
}
public void setSkuPropertyPrefix(String skuPropertyPrefix) {
if (StringUtils.isNotEmpty(skuPropertyPrefix) && !skuPropertyPrefix.endsWith(".")) {
skuPropertyPrefix += ".";
}
this.skuPropertyPrefix = skuPropertyPrefix;
}
} | class | java | 99,978 |
func bgRun(cmd *exec.Cmd) {
cmd.Stdin = nil
cmd.Stderr = os.Stderr
cmd.Stdout = os.Stderr
if err := cmd.Start(); err != nil {
log.Fatalf("%+v: %s", cmd.Args, err)
}
if _, err := cmd.Process.Wait(); err != nil {
log.Fatalf("%+v: %s", cmd.Args, err)
}
} | function | go | 99,979 |
def check_conflict_on_del(self, session, _id, db_content):
if session["force"]:
return
_filter = self._get_project_filter(session)
_filter["_admin.nst-id"] = _id
if self.db.get_list("nsis", _filter):
raise EngineException("there is at least one Netslice Instance using this descriptor",
http_code=HTTPStatus.CONFLICT) | function | python | 99,980 |
def print_forecast(forecaster: 'Forecaster') -> None:
forecast: 'Forecast' = forecaster.get_forecast()
location = forecast.get_location()
ansi_bold: str = '\033[1m'
ansi_yellow: str = '\033[33m'
ansi_reset: str = '\033[0m'
for weather in forecast:
rain_length, rainfall = len(weather.get_rain()), '0'
snow_length, snowfall = len(weather.get_snow()), '0'
if rain_length > 0:
rainfall = weather.get_rain()['3h']
if snow_length > 0:
snowfall = weather.get_snow()['3h']
temp_min = weather.get_temperature(unit='celsius')['temp_min']
temp_max = weather.get_temperature(unit='celsius')['temp_max']
temperature = temp_min if temp_min == temp_max else "{} - {}".format(temp_min, temp_max)
print(f"\n{ansi_bold}{location.get_name()} at "
f"{weather.get_reference_time('iso')}{ansi_reset}"
f"\n\tDescription:\t\t{ansi_yellow}{weather.get_detailed_status()}{ansi_reset}"
f"\n\tTemperature (°C):\t{ansi_yellow}{temperature}{ansi_reset}"
f"\n\tWind Speed (m/s):\t{ansi_yellow}{weather.get_wind()['speed']}{ansi_reset}"
f"\n\tRainfall (cm):\t\t{ansi_yellow}{rainfall}{ansi_reset}"
f"\n\tSnowfall (cm):\t\t{ansi_yellow}{snowfall}{ansi_reset}")
forecasts.append({saved_data_fields['loc']: location.get_name(),
saved_data_fields['time']: weather.get_reference_time('iso'),
saved_data_fields['desc']: weather.get_detailed_status(),
saved_data_fields['temp_min']: weather.get_temperature(unit='celsius')[
'temp_min'],
saved_data_fields['temp_max']: weather.get_temperature(unit='celsius')[
'temp_max'],
saved_data_fields['wind']: weather.get_wind()['speed'],
saved_data_fields['rain']: rainfall,
saved_data_fields['snow']: snowfall})
print(f'\n{ansi_bold}========================================{ansi_reset}\n') | function | python | 99,981 |
public string ImageinfoToCsv(TW_IMAGEINFO a_twimageinfo)
{
try
{
CSV csv = new CSV();
csv.Add(((double)a_twimageinfo.XResolution.Whole + ((double)a_twimageinfo.XResolution.Frac / 65536.0)).ToString());
csv.Add(((double)a_twimageinfo.YResolution.Whole + ((double)a_twimageinfo.YResolution.Frac / 65536.0)).ToString());
csv.Add(a_twimageinfo.ImageWidth.ToString());
csv.Add(a_twimageinfo.ImageLength.ToString());
csv.Add(a_twimageinfo.SamplesPerPixel.ToString());
csv.Add(a_twimageinfo.BitsPerSample_0.ToString());
csv.Add(a_twimageinfo.BitsPerSample_1.ToString());
csv.Add(a_twimageinfo.BitsPerSample_2.ToString());
csv.Add(a_twimageinfo.BitsPerSample_3.ToString());
csv.Add(a_twimageinfo.BitsPerSample_4.ToString());
csv.Add(a_twimageinfo.BitsPerSample_5.ToString());
csv.Add(a_twimageinfo.BitsPerSample_6.ToString());
csv.Add(a_twimageinfo.BitsPerSample_7.ToString());
csv.Add(a_twimageinfo.Planar.ToString());
csv.Add("TWPT_" + (TWPT)a_twimageinfo.PixelType);
csv.Add("TWCP_" + (TWCP)a_twimageinfo.Compression);
return (csv.Get());
}
catch (Exception exception)
{
Log.Error("***error*** - " + exception.Message);
return ("***error***");
}
} | function | c# | 99,982 |
long
wtimer_granularity (void)
{
#ifdef TIMER_GETTIMEOFDAY
return (1);
#endif
#ifdef TIMER_TIME
return (1000);
#endif
#ifdef TIMER_WINDOWS
return (1);
#endif
} | function | c | 99,983 |
private void ReadRecordTable(int pos, int numEntries, BinaryReader reader, StreamWriter outStream)
{
this.recordInfo = new Dictionary<string, RecordInfo>((int)Math.Round(numEntries * 1.2));
reader.BaseStream.Seek(pos, SeekOrigin.Begin);
if (outStream != null)
{
outStream.WriteLine("RecordTable located at 0x{0:X}", pos);
}
for (int i = 0; i < numEntries; ++i)
{
RecordInfo recordInfo = new RecordInfo();
recordInfo.Decode(reader, 24, this);
this.recordInfo.Add(TQData.NormalizeRecordPath(recordInfo.ID), recordInfo);
if (outStream != null)
{
outStream.WriteLine("{0},{1},{2}", i, recordInfo.ID, recordInfo.RecordType);
}
}
} | function | c# | 99,984 |
public class SpaceAlreadyStoppedException extends SpaceUnavailableException {
private static final long serialVersionUID = 3487826802841980532L;
/**
* Constructs a <code>SpaceAlreadyStoppedException</code> with the specified detail message.
*
* @param s - the detail message
*/
public SpaceAlreadyStoppedException(String spaceName, String s) {
super(spaceName, s);
}
} | class | java | 99,985 |
def seek(self,offset,whence=0):
assert whence in (0, 1, 2)
if whence == 0:
realoffset = self.firstbyte + offset
elif whence == 1:
realoffset = self.realpos + offset
elif whence == 2:
raise IOError('seek from end of file not supported.')
if self.lastbyte and (realoffset >= self.lastbyte):
realoffset = self.lastbyte
self._do_seek(realoffset - self.realpos) | function | python | 99,986 |
int nsm_monitor(const struct nlm_host *host)
{
struct nsm_handle *nsm = host->h_nsmhandle;
struct nsm_res res;
int status;
dprintk("lockd: nsm_monitor(%s)\n", nsm->sm_name);
if (nsm->sm_monitored)
return 0;
nsm->sm_mon_name = nsm_use_hostnames ? nsm->sm_name : nsm->sm_addrbuf;
status = nsm_mon_unmon(nsm, NSMPROC_MON, &res, host);
if (unlikely(res.status != 0))
status = -EIO;
if (unlikely(status < 0)) {
pr_notice_ratelimited("lockd: cannot monitor %s\n", nsm->sm_name);
return status;
}
nsm->sm_monitored = 1;
if (unlikely(nsm_local_state != res.state)) {
nsm_local_state = res.state;
dprintk("lockd: NSM state changed to %d\n", nsm_local_state);
}
return 0;
} | function | c | 99,987 |
func IsTryingToTraverseUp(s string) bool {
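	// Returns true if any path component equals "..", i.e. the path attempts to traverse upward.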
sep := string(filepath.Separator)
for _, fragment := range strings.Split(s, sep) {
if ".." == fragment {
return true
}
}
return false
} | function | go | 99,988 |
fn search(&self, chars: &[char]) -> bool {
let mut cur = self;
// let mut chs:Vec<char> = word.chars().collect();
for id in 0..chars.len() {
// let next = cur.child.get(&i);
if chars[id] != '.' {
let i = chars[id] as usize - 'a' as usize;
//如果存在,就继续往下找
if let Some(next_d) = cur.child.get(&i) {
cur = next_d;
continue;
}
            // No child found and the character is not the wildcard '.', so we can return false immediately
if i <= 25 {
return false;
}
} else {
for (_, v) in cur.child.iter() {
if v.search(&chars[id + 1..]) {
return true;
}
}
}
return false;
}
return cur.is_end;
} | function | rust | 99,989 |
func (iac *InstanceAdminClient) UpdateAppProfile(ctx context.Context, instanceID, profileID string, updateAttrs ProfileAttrsToUpdate) error {
ctx = mergeOutgoingMetadata(ctx, withGoogleClientInfo(), iac.md)
profile := &btapb.AppProfile{
Name: "projects/" + iac.project + "/instances/" + instanceID + "/appProfiles/" + profileID,
}
if updateAttrs.Description != nil {
profile.Description = optional.ToString(updateAttrs.Description)
}
if updateAttrs.RoutingPolicy != nil {
switch optional.ToString(updateAttrs.RoutingPolicy) {
case MultiClusterRouting:
profile.RoutingPolicy = &btapb.AppProfile_MultiClusterRoutingUseAny_{
MultiClusterRoutingUseAny: &btapb.AppProfile_MultiClusterRoutingUseAny{},
}
case SingleClusterRouting:
profile.RoutingPolicy = &btapb.AppProfile_SingleClusterRouting_{
SingleClusterRouting: &btapb.AppProfile_SingleClusterRouting{
ClusterId: updateAttrs.ClusterID,
AllowTransactionalWrites: updateAttrs.AllowTransactionalWrites,
},
}
default:
return errors.New("invalid routing policy")
}
}
patchRequest := &btapb.UpdateAppProfileRequest{
AppProfile: profile,
UpdateMask: &field_mask.FieldMask{
Paths: updateAttrs.GetFieldMaskPath(),
},
IgnoreWarnings: updateAttrs.IgnoreWarnings,
}
updateRequest, err := iac.iClient.UpdateAppProfile(ctx, patchRequest)
if err != nil {
return err
}
return longrunning.InternalNewOperation(iac.lroClient, updateRequest).Wait(ctx, nil)
} | function | go | 99,990 |
async function verifyResults(library) {
const { metaPath, resultsPath } = library;
if (!fs.existsSync(join(metaPath, "issues.json")))
throw new Error("Missing issues.json");
if (!fs.existsSync(join(metaPath, "summary.md")))
throw new Error("Missing summary.md");
if (!fs.existsSync(join(resultsPath, "results.json")))
throw new Error("Missing results.json");
if (!fs.existsSync(join(resultsPath, "results.html")))
throw new Error("missing results.html");
} | function | javascript | 99,991 |
def extract_model(self, program_name):
_method_name = 'extract_model'
self.__logger.entering(program_name, class_name=self.__class_name, method_name=_method_name)
try:
tmp_model_dir = FileUtils.createTempDirectory(program_name)
tmp_model_file = None
for archive_file in self.__archive_files[::-1]:
tmp_model_file = archive_file.extractModel(tmp_model_dir)
if tmp_model_file:
break
except (IllegalArgumentException, IllegalStateException, WLSDeployArchiveIOException), archex:
ex = exception_helper.create_cla_exception('WLSDPLY-20010', program_name, self.__archive_files_text,
archex.getLocalizedMessage(), error=archex)
self.__logger.throwing(ex, class_name=self.__class_name, method_name=_method_name)
raise ex
self.__logger.exiting(class_name=self.__class_name, method_name=_method_name,
result=(tmp_model_dir, tmp_model_file))
return tmp_model_dir, tmp_model_file | function | python | 99,992 |
static Oid
GetOperatorByType(Oid typeId, Oid accessMethodId, int16 strategyNumber)
{
Oid operatorClassId = GetDefaultOpClass(typeId, accessMethodId);
Oid operatorFamily = get_opclass_family(operatorClassId);
Oid operatorId = get_opfamily_member(operatorFamily, typeId, typeId, strategyNumber);
return operatorId;
} | function | c | 99,993 |
def define_fc_layer(fc_dict, last_block=False):
in_features = fc_dict["in_features"]
out_features = fc_dict["out_features"]
if last_block:
fc_block = [nn.Linear(in_features, out_features)]
else:
fc_block = [
nn.Linear(in_features, out_features),
nn.LeakyReLU()
]
return nn.Sequential(*fc_block) | function | python | 99,994 |
function obify_(str, pack) {
var ob = null;
try {
ob = str ? JSON.parse(str) : null;
}
catch (err) {
ns.errify(false, ns.settings.errors.INTERNAL, "data in exchange was invalid" + str, pack);
}
return ob;
} | function | javascript | 99,995 |
private List<MarketDataNode> dependencyNodes(MarketDataRequirements requirements) {
List<MarketDataNode> observableNodes =
buildNodes(requirements.getObservables(), MarketDataNode.DataType.SINGLE_VALUE);
List<MarketDataNode> nonObservableNodes =
buildNodes(requirements.getNonObservables(), MarketDataNode.DataType.SINGLE_VALUE);
List<MarketDataNode> timeSeriesNodes =
buildNodes(requirements.getTimeSeries(), MarketDataNode.DataType.TIME_SERIES);
return ImmutableList.<MarketDataNode>builder()
.addAll(observableNodes)
.addAll(nonObservableNodes)
.addAll(timeSeriesNodes)
.build();
} | function | java | 99,996 |
def interpret(marker, execution_context=None):
try:
expr, rest = parse_marker(marker)
except Exception as e:
raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e))
if rest and rest[0] != '#':
raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest))
context = DEFAULT_MARKER_CONTEXT.copy()
if execution_context:
context.update(execution_context)
return evaluator.evaluate(expr, context) | function | python | 99,997 |
@Override
public boolean isAllowedToAllocateMips(List<Double> vmRequestedMipsShare) {
final double pmMips = getPeCapacity();
double totalRequestedMips = 0;
for (final double vmMips : vmRequestedMipsShare) {
if (vmMips > pmMips) {
return false;
}
totalRequestedMips += vmMips;
}
if (getAvailableMips() < totalRequestedMips || getWorkingPeList().size() < vmRequestedMipsShare.size()) {
return false;
}
return true;
} | function | java | 99,998 |
func InstallOrUpdateApp(ctx context.Context, a *arc.ARC, d *ui.Device, pkgName string, tryLimit int) error {
installed, err := a.PackageInstalled(ctx, pkgName)
if err != nil {
return err
}
if !installed {
return InstallApp(ctx, a, d, pkgName, tryLimit)
}
testing.ContextLog(ctx, "App has already been installed; check if an update is available")
return installOrUpdate(ctx, a, d, pkgName, tryLimit, updateApp)
} | function | go | 99,999 |