content
stringlengths 7
2.61M
|
---|
/**
 * Computes the length of the longest palindrome that can be built from the
 * letters of a given string (letters may be rearranged; case-sensitive).
 */
class LongestPalindrome {
    /**
     * Returns the length of the longest palindrome constructible from {@code s}.
     *
     * Every pair of identical characters can be mirrored around the center,
     * and at most one leftover character may occupy the middle position.
     *
     * Fixes over the original: removes the debug {@code System.out.println},
     * returns 0 (not 1) for the empty string — {@code "".split("")} yields a
     * phantom empty token that was counted as a character — and counts
     * {@code char}s directly instead of allocating one-character Strings
     * into a TreeMap.
     *
     * @param s the input string (must not be null)
     * @return length of the longest palindrome buildable from s's characters
     */
    public int longestPalindrome(String s) {
        // Occurrence count per character.
        Map<Character, Integer> counts = new HashMap<>();
        for (int i = 0; i < s.length(); i++) {
            counts.merge(s.charAt(i), 1, Integer::sum);
        }
        int length = 0;
        boolean hasOddCount = false;
        for (int count : counts.values()) {
            // Use all complete pairs of this character.
            length += count - (count % 2);
            if (count % 2 != 0) {
                hasOddCount = true;
            }
        }
        // A single odd-count character can sit in the middle.
        return hasOddCount ? length + 1 : length;
    }
}
/**
* Converts the given {@link Key} to a {@link KeyCombination}.
*
* @param pKey the {@link Key} to convert.
* @return the {@link KeyCombination}.
*/
public static KeyCombination keyToKeyCombination(Key pKey)
{
if (pKey == null)
{
return null;
}
KeyCode keyCode = keyToKeyCode(pKey);
ModifierValue altModifier = ModifierValue.UP;
ModifierValue ctrlModifier = ModifierValue.UP;
ModifierValue shiftModifier = ModifierValue.UP;
if ((pKey.getModifiers() & UIKeyEvent.ALT_MASK) == UIKeyEvent.ALT_MASK)
{
altModifier = ModifierValue.DOWN;
}
if ((pKey.getModifiers() & UIKeyEvent.CTRL_MASK) == UIKeyEvent.CTRL_MASK)
{
ctrlModifier = ModifierValue.DOWN;
}
if ((pKey.getModifiers() & UIKeyEvent.SHIFT_MASK) == UIKeyEvent.SHIFT_MASK)
{
shiftModifier = ModifierValue.DOWN;
}
return new KeyCodeCombination(keyCode, ctrlModifier, shiftModifier, altModifier, ModifierValue.UP, ModifierValue.UP);
} |
package cn.mydoudou.singleton;
/**
 * @author fut
 * @description Lazy-initialization singleton (thread-safe).
 * However, every call to getInstance() must lock the object, causing
 * unnecessary synchronization overhead and poor efficiency.
 * @create 2018-09-22
 * @wiki
 */
public class SyncSingleton {
    /**
     * The lazily created singleton instance. Starts as {@code null} so that
     * construction is deferred until the first {@link #getInstance()} call.
     */
    private static SyncSingleton instance = null;

    /**
     * Private constructor prevents instantiation from outside the class.
     */
    private SyncSingleton() {
    }

    /**
     * Returns the single instance, creating it on first use.
     * Method-level synchronization makes the lazy initialization thread-safe,
     * at the cost of acquiring the lock on every call.
     */
    public static synchronized SyncSingleton getInstance() {
        if (instance == null) {
            instance = new SyncSingleton();
        }
        return instance;
    }

    /**
     * Preserves the singleton property across serialization by substituting
     * the canonical instance for any deserialized copy.
     */
    public Object readResolve() {
        return instance;
    }
}
|
<filename>include/RED4ext/Scripting/Natives/Generated/game/state/MachineplayeractionsLocomotionAir.hpp
#pragma once
// This file is generated from the Game's Reflection data
#include <cstdint>
#include <RED4ext/Common.hpp>
#include <RED4ext/Scripting/Natives/Generated/game/state/MachineplayeractionsLocomotionSimple.hpp>
namespace RED4ext
{
namespace game::state {
// Reflection-generated action state type; adds no members of its own beyond
// those inherited from MachineplayeractionsLocomotionSimple.
struct MachineplayeractionsLocomotionAir : game::state::MachineplayeractionsLocomotionSimple
{
static constexpr const char* NAME = "gamestateMachineplayeractionsLocomotionAir";
static constexpr const char* ALIAS = NAME;
};
// Compile-time guard that the struct layout matches the game's reflection data.
RED4EXT_ASSERT_SIZE(MachineplayeractionsLocomotionAir, 0x150);
} // namespace game::state
} // namespace RED4ext
|
1620 EARLY EFFECTS OF STREPTOZOTOCIN DIABETES (SD) ON RAT GLOMERULAR FILTRATION RATE (GFR) AND PROSTAGLANDIN E2 SYNTHESIS (PGE2) In SD the development of moderate hyperglycemia is closely associated with increases in GFR that may be mediated by altered production of or reactivity to vasoactive substances. In the present studies renal cortical and medullary PGE2 synthesis and whole kidney GFR (CIN) were measured 8 days after induction of SD (45 mg/kg i.v.) in young rats (139±1.7 g) treated with either daily aspirin (ASA) 300 mg/kg/day or ASA vehicle. PGE2 synthesis from 14C-arachidonic acid was determined in cortical and medullary microsomes by thin layer chromatography. Mean values (± 1SE) for body weight (BW), blood pressure (bp), plasma glucose (Pglu, mg/dl), CIN (ml/min/g KW), and PGE2 synthesis (ng/mg/30 min) are shown below: *p<0.05, compared to group above; p<0.05, compared to control (C). Weight gain in SD and SD/ASA rats was less than in C and C/ASA. After 8 days of SD, PGE2 synthesis and GFR were increased compared to control values. Eight days of PG inhibition reduced PGE2 synthesis by 40% and GFR by 25% in SD/ASA compared to SD rats. ASA given to control rats reduced PGE2 synthesis but had no effect on GFR. These data suggest that hyperfiltration observed in moderately hyperglycemic SD rats may be mediated by elevated rates of prostaglandin synthesis. Whether chronic alterations in prostaglandin synthesis influence the onset and progression of glomerular histologic changes remains unknown. |
ILOILO CITY — Photographs of nine human rights lawyers in Panay depicting them as “lawyers of terrorists” and members of the New People’s Army were posted on electric posts in Iloilo City on Tuesday.
Also included in the posters were Reylan Vergara, national vice chair of the human rights group Karapatan, and Jose Ely Garachico, a paralegal volunteer of Karapatan and NUPL in Panay.
NUPL-Panay, in a statement, said putting up these posters was “an old trick” to shame or silence government critics.
“Worse, this practice is yet preparatory or to be [used] as excuse and menu for public mind-conditioning to unleash armed attack … on the subjects of their vilification campaign,” NUPL-Panay said.
The posters emerged a day after the holding of protest actions to mark International Human Rights Day and after lawyers in Iloilo had formed an alliance against human rights violations and continued attacks on them.
At least 34 judges, prosecutors, and lawyers have been killed since President Rodrigo Duterte assumed office in July 2016, according to NUPL.
The latest victim was human rights lawyer Benjamin Ramos Jr., who was shot and killed by gunmen on a motorcycle in Sipalay City on Nov. 6.
Abdiel Dan Elijah Fajardo, IBP national president, said the group was alarmed with the harassment and intimidation of lawyers occurring with impunity.
“We can no longer stand idly by and allow this to continue,” Fajardo said in a statement sent to the Inquirer.
Several Iloilo lawyers earlier formed the Lawyers Alliance in Defense of the Legal Profession and the Rule of Law in response to the killings and human rights violations. The group committed to help victims of human rights violations.
The group said those merely suspected or accused of crimes were being denied due process and killed.
”… Priests and lawyers [are being] gunned down in [the] line of duty. People are being persecuted for their beliefs and advocacies. Government bureaucracy is gradually militarized and those critical of the Duterte administration are vilified, ostracized, harassed and even attacked physically,” the group said in a resolution.
As citizens and members of the legal profession, the lawyers said they had the obligation to speak out and address the violation of rights and trampling of freedoms.
One of those in the posters, Jose Ely Garachico, was shot and wounded when armed men attacked him in Oton town on April 12, 2007.
They left Garachico for dead and abducted two activists, Ma. Luisa Posa-Dominado and Nilo Arado, who remained missing. |
Experience of individual computer system developments in the field of vestibular science and suggestions of organizational supports for wide distribution of the developed systems in the future. The 4 computer systems developed by the author were introduced: 1) ABR data processing software; 2) Eye-head coordination analysis software; 3) Universal ENG data analysis software; 4) Web software for drawing nystagmograms. The former 3 systems could not be distributed widely, while the latter web software can be accessed by anyone via the internet. However, for this system to be used as a tool for a large-scale study of the relationships between nystagmus findings and diseases, standardization of the drawing convention is mandatory. Although the field of vestibular science is appropriate for personal system development, supports of certain organizations are necessary for wide distribution of the developed systems. |
<reponame>yuce/hazelcast-cpp-client
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <memory>
#include <string>
#include <unordered_map>
#include <vector>

#include "hazelcast/client/config/config_pattern_matcher.h"
#include "hazelcast/util/SynchronizedMap.h"
namespace hazelcast {
namespace client {
namespace internal {
namespace config {
/**
 * Utility class to access configuration.
 */
class ConfigUtils {
public:
    /**
     * Looks up a configuration entry by name, falling back to pattern
     * matching when no exact match exists.
     *
     * @param config_pattern_matcher matcher used to resolve pattern keys
     * @param config_patterns map of configured name patterns to configs
     * @param item_name the concrete name being looked up
     * @return pointer to the matching config, or nullptr when nothing
     *         matches. The pointer refers into \p config_patterns and is
     *         only valid while that map is alive and unmodified.
     */
    template<typename T>
    static const T *
    lookup_by_pattern(const client::config::config_pattern_matcher &config_pattern_matcher,
                      const std::unordered_map<std::string, T> &config_patterns, const std::string &item_name) {
        // Fast path: exact name match.
        auto candidate = config_patterns.find(item_name);
        if (candidate != config_patterns.end()) {
            return &candidate->second;
        }
        // Collect every configured pattern key for the matcher.
        // BUG FIX: the original loop never advanced its index, so all keys
        // were written to slot 0 and the remaining slots stayed empty.
        std::vector<std::string> keys;
        keys.reserve(config_patterns.size());
        for (const auto &e : config_patterns) {
            keys.push_back(e.first);
        }
        std::shared_ptr<std::string> configPatternKey = config_pattern_matcher.matches(
                keys, item_name);
        if (configPatternKey) {
            candidate = config_patterns.find(*configPatternKey);
            if (candidate != config_patterns.end()) {
                return &candidate->second;
            }
        }
        return nullptr;
    }
};
}
}
}
};
|
OSLO, April 5 (Reuters) - A Brazilian federal court has scheduled an April 12 hearing on the year-long production embargo at Norsk Hydro’s Alunorte alumina refinery in Brazil, the Norwegian company said on Friday.
There is no timeline yet for when a decision will be made on whether to lift the embargo, Hydro said in a statement.
The largest of its kind outside China, Alunorte transforms bauxite from mines in Brazil into alumina, the key material used for making aluminium at smelters owned by Hydro and others around the world.
Since the unlicensed emission of untreated water during severe rains in February last year, Hydro has upgraded Alunorte’s facilities to help convince authorities it was safe to resume full output.
On March 26, Hydro agreed with Brazil’s federal prosecutor to have a third-party technical assessment of two reports backing Hydro’s position that the plant’s problems had been fixed. |
<reponame>lifedreams/graphql-java
package graphql.language;
import graphql.PublicApi;
import graphql.util.TraversalControl;
import graphql.util.TraverserContext;
@PublicApi
public class AstTransformerUtil {
/**
 * Helper method to be used inside a {@link NodeVisitor} to actually change a node.
 * <p>
 * It generates a new {@link AstZipper} and replaces the current accumulated {@link AstMultiZipper} including
 * the new {@link AstZipper}.
 *
 * @param context the context in play
 * @param changedNode the changed node
 *
 * @return traversal control to allow for a more fluent coding style
 */
public static TraversalControl changeNode(TraverserContext<Node> context, Node changedNode) {
// Wrap the changed node in a zipper derived from the one stored on the context.
AstZipper zipperWithChangedNode = context.getVar(AstZipper.class).withNewNode(changedNode);
AstMultiZipper multiZipper = context.getCurrentAccumulate();
context.setAccumulate(multiZipper.withNewZipper(zipperWithChangedNode));
// Also record the change on the traversal context itself.
context.changeNode(changedNode);
return TraversalControl.CONTINUE;
}
}
|
The following sites are fake news. And I don't mean fake news as in opinionated pieces, I mean actually 100% fake news. Don't fall for manufactured outrage.
Isn't that "BlackOpinion.co.za" the site of "Black First, Land First" by Andile Mngxitama and his friends?
German news magazine Der Spiegel has sacked an award-winning staff writer after accusing him of inventing details and quotes in numerous stories.
Claas Relotius "falsified articles on a grand scale and even invented characters", Der Spiegel said.
Among the articles in question are major features that had been nominated for or won awards, the magazine added.
Mr Relotius, 33, admitted deceiving readers in some 14 stories published in Der Spiegel, the magazine said.
An investigation into a story by Mr Relotius about immigration and the US-Mexican border revealed that he had fabricated information about seeing a hand-painted sign in a town in Minnesota that read: "Mexicans Keep Out."
Fraudulent information appeared in other stories including one about inmates at the US military prison at Guantanamo Bay and another about the US NFL quarterback Colin Kaepernick.
It's as if they WANT the fake news to be true, even when told the groundbreaking news flash they just shared on FB is fake.
CNN..... After 2 years of pushing and publicising how confident are they in RussiaGate and they have evidences and "witnesses" - last Friday they have been left red faced (or not really) after the Mueller report concluded there was no collusion between the orange man and Russia..
Still no apology to their "fateful" supports..
At last Orange man was right calling them "Fake News".
Not to touch-white helmets, Gas attacks and so on..
CNN has gone down the toilet in recent years.
As have many mainstream news sources. It's a different world now, and the war against Fake News is not going away any time soon. The credibility of journalists has now become immensely important, and the rule of thumb is to question everything and demand corroboration. |
Strategic Approach to Coping Scale: Psychometric Characteristics of the Russian Version of Technique Coping strategies are an important factor for a human psychological well-being recon-struction. Theoretical literature analysis revealed that the identified coping mechanisms are relatively few comparing to the number of publications about their measurement. At the same time, in Russian psychology there is no adequate toolkit for measuring the coping behavior and human individual resources. In this regard, it is very important to adapt, validate already existing and well-established foreign methods. The scales rates have been established taking into account the multicultural and gender characteristics of the sample. The representativeness of the sample presented makes the resulting norms more correct in assessing the significance of the overcoming behavior strategy. Validity verification of the questionnaire allows speaking about the discrepancy between the received factor structure of the methodology and its author's model. To overcome these discrepancies, some questionnaire items need to be reformulated. The Russian-language version of the technique has good construct validity. The test is capable to measure overcoming behavior strategies. Coping strategies are predictably associated with the level of conscious self-regulation scales (planning, programming, flexibility, evaluation of results) and socially significant personal dispositions (to be close to others, to help them, to empathize and to rejoice in successes, to be conscientious, responsible, and cautious in work). In general, the results of the evaluation of psychometric indices of the SACS methodology showed acceptable internal consistency of the scales of the Russian-language version of the methodology, confirmed structural validity, refined test norms, which allows us talking about the possibilities of using this technique in psychological studies. |
The Christian group that backed an electrician in his battle to display a crucifix in his company van says it now has more than 50 similar cases on its books.
The Christian Legal Centre, which represented electrician Colin Atkinson, says it is receiving up to five calls a day from Christians seeking to take action against their employers whom they feel are failing to respect their faith.
The dispute over the crucifix, between Atkinson and Wakefield District Housing, was transformed into a front-page row and hijacked by the far right. Wakefield and District Housing found itself vilified, with death threats made to staff and more than 1,000 abusive emails sent to them. The British National party picketed its offices.
The matter was finally settled last week when it was agreed that Atkinson could keep a cross in the van, but out of the public eye. Supporters of the housing association expressed surprise that a policy prohibiting staff from displaying all personal items in company vehicles had sparked a religious row. "This is a quintessentially modern story of a voluntary sector organisation trying to do the right thing by its staff and tenants only to be misrepresented by the tabloid press and attacked by shadowy groups and rabble rousers from the BNP," said David Orr, chief executive of the National Housing Federation.
But Andrea Williams, director of the Christian Legal Centre, said that her organisation would continue to contest policies it viewed as anti-Christian.
"Christians across the centuries have been prepared to lose their lives for their faith by standing up for what they believe in because they love Jesus Christ," she said. "The Christian Legal Centre will not allow Christianity to be eliminated from the public sphere or to be silenced or sidelined."
Questions have been asked about from where the centre – and its sister organisation, Christian Concern For Our Nation – obtain funding. Accounts show both organisations have little in the way of income.
Williams said all of the centre's work was done on a pro bono basis by committed Christian lawyers and that what money it had came in small donations from more than 30,000 people who received its regular email updates. "We never ask clients for money," she said. "Very often they fear losing their case and having to pay the costs of the other side. Part of our ministry is to ensure they are not burdened with that."
Close observers of the centre believe it is adopting the tactics of wealthy US evangelical groups, notably the powerful Alliance Defence Fund, which, through its Blackstone Legal Fellowship, trains an army of Christian lawyers to defend religious freedom "through strategy, training, funding and direct litigation".
The ADF, which according to filings had an income of almost $40m last year, is funded by prominent benefactors including Erik Prince, founder of the Blackwater private security giant, the Covenant Foundation, which is financed by a leading member of the Texas Christian right, James Leininger, and the Bolthouse Foundation, a charity that rejects evolution, insisting "man was created by a direct act of God in His image, not from previously existing creatures".
The ADF has joined forces with the Christian Legal Centre and Christian Concern For Our Nation to launch the Wilberforce Academy in the UK, which aims to train delegates "for servant-hearted, Christ-centred leadership in public life" having equipped them "with a robust biblical framework that guides their thinking, prayers and activity in addressing the issues facing our society". Several of its delegates have already gone on to work for the legal centre and Christian Concern.
"The ADF are a fantastic organisation," Williams said. "We have been inspired by their work and that of the Blackstone programme, which seeks to raise a new generation of lawyers to defend Christianity in the public sphere. They've got some of the best attorneys in this field and we have the great privilege of hosting them, but they don't pay anything towards the academy."
Those who attend the academy programme, held at an Oxford college each year, say it increases their enthusiasm for using the law to defend the Bible. A typical comment on its website reads: "For the past four years I have sensed God calling me to the legal profession and during the Wilberforce Academy I was humbled to realise that, although we may feel like David facing Goliath, given the right weapons we may step boldly up to the task ahead."
But critics of the Christian Legal Centre suggest it rarely wins any of the legal battles it fights and claim its views are offensive to homosexuals and people from other religions. "They don't seem so keen to support religious liberty for Muslims or atheists," said Keith Porteous Wood of the National Secular Society.
However, Williams said her organisation had struck a chord with many people. "What we are finding is that people with traditional biblical views, particularly when it comes to sexual ethics, are being excluded from the public sphere, and they are not getting jobs or are finding it difficult to stay in their jobs. If you censor Christians, you end up with the first signs of tyranny."
PROMINENT CASES
Electrician Colin Atkinson was represented by the Christian Legal Centre in his dispute with Wakefield District Housing over whether he could display a crucifix in his van.
Owen and Eunice Johns from Derby became embroiled in a dispute over an application to be foster carers after a social worker questioned their views on homosexuality.
The Christian Legal Centre took up the case of Nadia Eweida, a British Airways employee who was told she could not wear a small cross around her neck.
Nurse Shirley Chaplin, an employee of Royal Devon and Exeter hospital, in her battle to wear a cross on a necklace on the wards that superiors said was a health risk. |
/*
* Copyright (C) 2011-2014 <NAME>
* HSR Hochschule fuer Technik Rapperswil
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version. See <http://www.fsf.org/copyleft/gpl.txt>.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*/
#include "tcg_attr.h"
#include "tcg/pts/tcg_pts_attr_proto_caps.h"
#include "tcg/pts/tcg_pts_attr_dh_nonce_params_req.h"
#include "tcg/pts/tcg_pts_attr_dh_nonce_params_resp.h"
#include "tcg/pts/tcg_pts_attr_dh_nonce_finish.h"
#include "tcg/pts/tcg_pts_attr_meas_algo.h"
#include "tcg/pts/tcg_pts_attr_get_tpm_version_info.h"
#include "tcg/pts/tcg_pts_attr_tpm_version_info.h"
#include "tcg/pts/tcg_pts_attr_get_aik.h"
#include "tcg/pts/tcg_pts_attr_aik.h"
#include "tcg/pts/tcg_pts_attr_req_func_comp_evid.h"
#include "tcg/pts/tcg_pts_attr_gen_attest_evid.h"
#include "tcg/pts/tcg_pts_attr_simple_comp_evid.h"
#include "tcg/pts/tcg_pts_attr_simple_evid_final.h"
#include "tcg/pts/tcg_pts_attr_req_file_meas.h"
#include "tcg/pts/tcg_pts_attr_file_meas.h"
#include "tcg/pts/tcg_pts_attr_req_file_meta.h"
#include "tcg/pts/tcg_pts_attr_unix_file_meta.h"
#include "tcg/swid/tcg_swid_attr_req.h"
#include "tcg/swid/tcg_swid_attr_tag_id_inv.h"
#include "tcg/swid/tcg_swid_attr_tag_inv.h"
#include "tcg/seg/tcg_seg_attr_max_size.h"
#include "tcg/seg/tcg_seg_attr_seg_env.h"
#include "tcg/seg/tcg_seg_attr_next_seg.h"
/*
 * Human-readable names for the TCG attribute types.  The ENUM_BEGIN/ENUM_NEXT
 * macros stitch one name table together from several non-contiguous value
 * ranges: each ENUM_NEXT gives the new range's first and last value, repeats
 * the previous range's last value, and then lists one string per value.
 */
ENUM_BEGIN(tcg_attr_names, TCG_SCAP_REFERENCES,
TCG_SCAP_SUMMARY_RESULTS,
"SCAP References",
"SCAP Capabilities and Inventory",
"SCAP Content",
"SCAP Assessment",
"SCAP Results",
"SCAP Summary Results");
ENUM_NEXT(tcg_attr_names, TCG_SWID_REQUEST,
TCG_SWID_SUBSCRIPTION_STATUS_RESP,
TCG_SCAP_SUMMARY_RESULTS,
"SWID Request",
"SWID Tag Identifier Inventory",
"SWID Tag Identifier Events",
"SWID Tag Inventory",
"SWID Tag Events",
"SWID Subscription Status Request",
"SWID Subscription Status Response");
ENUM_NEXT(tcg_attr_names, TCG_SEG_MAX_ATTR_SIZE_REQ,
TCG_SEG_CANCEL_SEG_EXCH,
TCG_SWID_SUBSCRIPTION_STATUS_RESP,
"Max Attribute Size Request",
"Max Attribute Size Response",
"Attribute Segment Envelope",
"Next Segment Request",
"Cancel Segment Exchange");
/* The PTS attribute values are non-contiguous, so each gets its own
 * single-value ENUM_NEXT range below. */
ENUM_NEXT(tcg_attr_names, TCG_PTS_REQ_FUNC_COMP_EVID,
TCG_PTS_REQ_FUNC_COMP_EVID,
TCG_SEG_CANCEL_SEG_EXCH,
"Request Functional Component Evidence");
ENUM_NEXT(tcg_attr_names, TCG_PTS_GEN_ATTEST_EVID,
TCG_PTS_GEN_ATTEST_EVID,
TCG_PTS_REQ_FUNC_COMP_EVID,
"Generate Attestation Evidence");
ENUM_NEXT(tcg_attr_names, TCG_PTS_SIMPLE_COMP_EVID,
TCG_PTS_SIMPLE_COMP_EVID,
TCG_PTS_GEN_ATTEST_EVID,
"Simple Component Evidence");
ENUM_NEXT(tcg_attr_names, TCG_PTS_SIMPLE_EVID_FINAL,
TCG_PTS_SIMPLE_EVID_FINAL,
TCG_PTS_SIMPLE_COMP_EVID,
"Simple Evidence Final");
ENUM_NEXT(tcg_attr_names, TCG_PTS_VERIFICATION_RESULT,
TCG_PTS_VERIFICATION_RESULT,
TCG_PTS_SIMPLE_EVID_FINAL,
"Verification Result");
ENUM_NEXT(tcg_attr_names, TCG_PTS_INTEG_REPORT,
TCG_PTS_INTEG_REPORT,
TCG_PTS_VERIFICATION_RESULT,
"Integrity Report");
ENUM_NEXT(tcg_attr_names, TCG_PTS_REQ_FILE_META,
TCG_PTS_REQ_FILE_META,
TCG_PTS_INTEG_REPORT,
"Request File Metadata");
ENUM_NEXT(tcg_attr_names, TCG_PTS_WIN_FILE_META,
TCG_PTS_WIN_FILE_META,
TCG_PTS_REQ_FILE_META,
"Windows-Style File Metadata");
ENUM_NEXT(tcg_attr_names, TCG_PTS_UNIX_FILE_META,
TCG_PTS_UNIX_FILE_META,
TCG_PTS_WIN_FILE_META,
"Unix-Style File Metadata");
ENUM_NEXT(tcg_attr_names, TCG_PTS_REQ_REGISTRY_VALUE,
TCG_PTS_REQ_REGISTRY_VALUE,
TCG_PTS_UNIX_FILE_META,
"Request Registry Value");
ENUM_NEXT(tcg_attr_names, TCG_PTS_REGISTRY_VALUE,
TCG_PTS_REGISTRY_VALUE,
TCG_PTS_REQ_REGISTRY_VALUE,
"Registry Value");
ENUM_NEXT(tcg_attr_names, TCG_PTS_REQ_FILE_MEAS,
TCG_PTS_REQ_FILE_MEAS,
TCG_PTS_REGISTRY_VALUE,
"Request File Measurement");
ENUM_NEXT(tcg_attr_names, TCG_PTS_FILE_MEAS,
TCG_PTS_FILE_MEAS,
TCG_PTS_REQ_FILE_MEAS,
"File Measurement");
ENUM_NEXT(tcg_attr_names, TCG_PTS_REQ_INTEG_MEAS_LOG,
TCG_PTS_REQ_INTEG_MEAS_LOG,
TCG_PTS_FILE_MEAS,
"Request Integrity Measurement Log");
ENUM_NEXT(tcg_attr_names, TCG_PTS_INTEG_MEAS_LOG,
TCG_PTS_INTEG_MEAS_LOG,
TCG_PTS_REQ_INTEG_MEAS_LOG,
"Integrity Measurement Log");
ENUM_NEXT(tcg_attr_names, TCG_PTS_REQ_PROTO_CAPS,
TCG_PTS_REQ_PROTO_CAPS,
TCG_PTS_INTEG_MEAS_LOG,
"Request PTS Protocol Capabilities");
ENUM_NEXT(tcg_attr_names, TCG_PTS_PROTO_CAPS,
TCG_PTS_PROTO_CAPS,
TCG_PTS_REQ_PROTO_CAPS,
"PTS Protocol Capabilities");
ENUM_NEXT(tcg_attr_names, TCG_PTS_DH_NONCE_PARAMS_REQ,
TCG_PTS_DH_NONCE_PARAMS_REQ,
TCG_PTS_PROTO_CAPS,
"DH Nonce Parameters Request");
ENUM_NEXT(tcg_attr_names, TCG_PTS_DH_NONCE_PARAMS_RESP,
TCG_PTS_DH_NONCE_PARAMS_RESP,
TCG_PTS_DH_NONCE_PARAMS_REQ,
"DH Nonce Parameters Response");
ENUM_NEXT(tcg_attr_names, TCG_PTS_DH_NONCE_FINISH,
TCG_PTS_DH_NONCE_FINISH,
TCG_PTS_DH_NONCE_PARAMS_RESP,
"DH Nonce Finish");
ENUM_NEXT(tcg_attr_names, TCG_PTS_MEAS_ALGO,
TCG_PTS_MEAS_ALGO,
TCG_PTS_DH_NONCE_FINISH,
"PTS Measurement Algorithm Request");
ENUM_NEXT(tcg_attr_names, TCG_PTS_MEAS_ALGO_SELECTION,
TCG_PTS_MEAS_ALGO_SELECTION,
TCG_PTS_MEAS_ALGO,
"PTS Measurement Algorithm");
ENUM_NEXT(tcg_attr_names, TCG_PTS_GET_TPM_VERSION_INFO,
TCG_PTS_GET_TPM_VERSION_INFO,
TCG_PTS_MEAS_ALGO_SELECTION,
"Get TPM Version Information");
ENUM_NEXT(tcg_attr_names, TCG_PTS_TPM_VERSION_INFO,
TCG_PTS_TPM_VERSION_INFO,
TCG_PTS_GET_TPM_VERSION_INFO,
"TPM Version Information");
ENUM_NEXT(tcg_attr_names, TCG_PTS_REQ_TEMPL_REF_MANI_SET_META,
TCG_PTS_REQ_TEMPL_REF_MANI_SET_META,
TCG_PTS_TPM_VERSION_INFO,
"Request Template Reference Manifest Set Metadata");
ENUM_NEXT(tcg_attr_names, TCG_PTS_TEMPL_REF_MANI_SET_META,
TCG_PTS_TEMPL_REF_MANI_SET_META,
TCG_PTS_REQ_TEMPL_REF_MANI_SET_META,
"Template Reference Manifest Set Metadata");
ENUM_NEXT(tcg_attr_names, TCG_PTS_UPDATE_TEMPL_REF_MANI,
TCG_PTS_UPDATE_TEMPL_REF_MANI,
TCG_PTS_TEMPL_REF_MANI_SET_META,
"Update Template Reference Manifest");
ENUM_NEXT(tcg_attr_names, TCG_PTS_GET_AIK,
TCG_PTS_GET_AIK,
TCG_PTS_UPDATE_TEMPL_REF_MANI,
"Get Attestation Identity Key");
ENUM_NEXT(tcg_attr_names, TCG_PTS_AIK,
TCG_PTS_AIK,
TCG_PTS_GET_AIK,
"Attestation Identity Key");
ENUM_END(tcg_attr_names, TCG_PTS_AIK);
/**
 * Creates a TCG attribute object from received binary attribute data.
 *
 * @param type		TCG attribute type of the payload
 * @param length	length of the attribute value in bytes
 * @param value		binary attribute payload
 * @return			attribute object, or NULL for unsupported attribute types
 */
pa_tnc_attr_t* tcg_attr_create_from_data(u_int32_t type, size_t length, chunk_t value)
{
switch (type)
{
case TCG_SWID_REQUEST:
return tcg_swid_attr_req_create_from_data(length, value);
case TCG_SWID_TAG_ID_INVENTORY:
return tcg_swid_attr_tag_id_inv_create_from_data(length, value);
case TCG_SWID_TAG_INVENTORY:
return tcg_swid_attr_tag_inv_create_from_data(length, value);
/* the trailing TRUE/FALSE flag distinguishes the request (TRUE)
 * from the response (FALSE) variant of the shared constructor */
case TCG_SEG_MAX_ATTR_SIZE_REQ:
return tcg_seg_attr_max_size_create_from_data(length, value, TRUE);
case TCG_SEG_MAX_ATTR_SIZE_RESP:
return tcg_seg_attr_max_size_create_from_data(length, value, FALSE);
case TCG_SEG_ATTR_SEG_ENV:
return tcg_seg_attr_seg_env_create_from_data(length, value);
case TCG_SEG_NEXT_SEG_REQ:
return tcg_seg_attr_next_seg_create_from_data(length, value);
case TCG_PTS_REQ_PROTO_CAPS:
return tcg_pts_attr_proto_caps_create_from_data(length, value,
TRUE);
case TCG_PTS_PROTO_CAPS:
return tcg_pts_attr_proto_caps_create_from_data(length, value,
FALSE);
case TCG_PTS_DH_NONCE_PARAMS_REQ:
return tcg_pts_attr_dh_nonce_params_req_create_from_data(length,
value);
case TCG_PTS_DH_NONCE_PARAMS_RESP:
return tcg_pts_attr_dh_nonce_params_resp_create_from_data(length,
value);
case TCG_PTS_DH_NONCE_FINISH:
return tcg_pts_attr_dh_nonce_finish_create_from_data(length, value);
case TCG_PTS_MEAS_ALGO:
return tcg_pts_attr_meas_algo_create_from_data(length, value,
FALSE);
case TCG_PTS_MEAS_ALGO_SELECTION:
return tcg_pts_attr_meas_algo_create_from_data(length, value,
TRUE);
case TCG_PTS_GET_TPM_VERSION_INFO:
return tcg_pts_attr_get_tpm_version_info_create_from_data(length,
value);
case TCG_PTS_TPM_VERSION_INFO:
return tcg_pts_attr_tpm_version_info_create_from_data(length,
value);
case TCG_PTS_GET_AIK:
return tcg_pts_attr_get_aik_create_from_data(length, value);
case TCG_PTS_AIK:
return tcg_pts_attr_aik_create_from_data(length, value);
case TCG_PTS_REQ_FUNC_COMP_EVID:
return tcg_pts_attr_req_func_comp_evid_create_from_data(length,
value);
case TCG_PTS_GEN_ATTEST_EVID:
return tcg_pts_attr_gen_attest_evid_create_from_data(length, value);
case TCG_PTS_SIMPLE_COMP_EVID:
return tcg_pts_attr_simple_comp_evid_create_from_data(length,
value);
case TCG_PTS_SIMPLE_EVID_FINAL:
return tcg_pts_attr_simple_evid_final_create_from_data(length,
value);
case TCG_PTS_REQ_FILE_MEAS:
return tcg_pts_attr_req_file_meas_create_from_data(length, value);
case TCG_PTS_FILE_MEAS:
return tcg_pts_attr_file_meas_create_from_data(length, value);
case TCG_PTS_REQ_FILE_META:
return tcg_pts_attr_req_file_meta_create_from_data(length, value);
case TCG_PTS_UNIX_FILE_META:
return tcg_pts_attr_unix_file_meta_create_from_data(length, value);
/* unsupported TCG/SWID attributes */
case TCG_SWID_TAG_ID_EVENTS:
case TCG_SWID_TAG_EVENTS:
case TCG_SWID_SUBSCRIPTION_STATUS_REQ:
case TCG_SWID_SUBSCRIPTION_STATUS_RESP:
/* unsupported TCG/PTS attributes */
case TCG_PTS_REQ_TEMPL_REF_MANI_SET_META:
case TCG_PTS_TEMPL_REF_MANI_SET_META:
case TCG_PTS_UPDATE_TEMPL_REF_MANI:
case TCG_PTS_VERIFICATION_RESULT:
case TCG_PTS_INTEG_REPORT:
case TCG_PTS_WIN_FILE_META:
case TCG_PTS_REQ_REGISTRY_VALUE:
case TCG_PTS_REGISTRY_VALUE:
case TCG_PTS_REQ_INTEG_MEAS_LOG:
case TCG_PTS_INTEG_MEAS_LOG:
default:
return NULL;
}
}
|
<filename>backend/database/wrapper/chart/chart_data.py
import io
from enum import Enum, auto
from typing import List, Optional
import pandas as pd
from flask import send_file
class ChartDataPoint:
    """One labelled chart value; ``average`` is attached only when provided."""

    def __init__(self, name: str, value: float, average: Optional[float] = None):
        self.name = name
        self.value = value
        if average is None:
            # Leave the attribute absent so __dict__-based serialization omits it.
            return
        self.average = average
class ChartData:
    """A titled collection of chart points, stored as plain dicts."""

    def __init__(self, title: str, chart_data_points: List[ChartDataPoint]):
        self.title = title
        # Store each point's attribute dict rather than the object itself,
        # presumably so the payload serializes cleanly (e.g. to JSON).
        self.chartDataPoints = [vars(point) for point in chart_data_points]
class ChartType(Enum):
    """How a chart is rendered; auto() assigns the values 1, 2, 3."""
    radar = auto()
    bar = auto()
    pie = auto()
class ChartSubcatagory(Enum):
    """Marker base for chart subcategory enums.

    NOTE(review): the identifier keeps its original misspelling
    ("Subcatagory") because external code may reference it by name.
    """
    pass
class ChartStatsMetadata:
    """Describes how a single stat should be charted."""

    def __init__(self, stat_name: str, type_: ChartType, subcategory: ChartSubcatagory):
        self.stat_name = stat_name
        self.type = type_.name
        # Enum member names use underscores; display labels use spaces.
        label = subcategory.name
        self.subcategory = label.replace('_', ' ')
def convert_to_csv(chart_data):
    """Serialise a list of ChartData objects into a CSV download response.

    Builds one column per chart ("Player" first, taken from the first
    chart's point names), then streams the CSV back via Flask's send_file.

    :param chart_data: non-empty list of ChartData objects; every chart is
        assumed to list its points in the same player order as the first
        one — TODO confirm against callers.
    :return: Flask response with the CSV as an attachment.
    :raises ValueError: if chart_data is empty (previously an IndexError).
    """
    if not chart_data:
        raise ValueError("chart_data must contain at least one ChartData")
    df = pd.DataFrame(columns=["Player"] + [data.title for data in chart_data])
    # Player names come from the first chart; values are matched by position.
    df["Player"] = pd.Series([point["name"] for point in chart_data[0].chartDataPoints])
    for data in chart_data:
        df[data.title] = pd.Series([point["value"] for point in data.chartDataPoints])
    csv_bytes = io.BytesIO()
    # Render to text first, then encode: send_file needs a binary stream.
    with io.StringIO() as text_buffer:
        df.to_csv(text_buffer)
        csv_bytes.write(text_buffer.getvalue().encode())
    csv_bytes.seek(0)
    return send_file(
        csv_bytes,
        as_attachment=True,
        attachment_filename='test.csv',
        mimetype='text/csv'
    )
|
#ifndef _COMMON_H_
#define _COMMON_H_
#include "BH66f2652.h"
#include "typedef.h"
#include "..\BH66F26x2_SDK\SDK_Interface.h"
#include "..\LedScan6x5\LedScan6x5_API.h"
#include "BH66F26x2_Sys.h"
#include "BodyFat_R.h"
#include "User_Protocol.h"
#include "BH66F26x2_UART.h"
#endif
|
/**
* @description upload-img test
* @author luochao
*/
import createEditor from '../../../helpers/create-editor'
import mockCmdFn from '../../../helpers/command-mock'
import mockFile from '../../../helpers/mock-file'
import mockXHR from '../../../helpers/mock-xhr'
import Editor from '../../../../src/editor'
import UploadImg from '../../../../src/menus/img/upload-img'
let editor: Editor
// incrementing suffix so each test mounts a fresh container div
let id = 1
const imgUrl = 'http://www.wangeditor.com/imgs/logo.jpeg'
// a src the mocked Image below treats as a load failure
const errorUrl = 'logo123.jpeg'
const uploadImgServer = 'http://localhost:8881/api/upload-img'
// default successful upload response returned by the mocked XHR
const defaultRes = {
    status: 200,
    res: JSON.stringify({ data: ['url1'], errno: 0 }),
}
// Install a jest-mocked XMLHttpRequest that replays the given canned response,
// and hand back the mock so tests can trigger its readystate callbacks.
const mockXHRHttpRequest = (res: any = defaultRes) => {
    const xhrMock = mockXHR(res)
    // @ts-ignore
    window.XMLHttpRequest = jest.fn().mockImplementation(() => xhrMock)
    return xhrMock
}
const createUploadImgInstance = (config: any) => {
const editor = createEditor(document, `div${id++}`, '', config)
const uploadImg = new UploadImg(editor)
return uploadImg
}
// Stub out document.execCommand and report every command as supported,
// so insertHTML-based insertion can be asserted against.
const mockSupportCommand = () => {
    mockCmdFn(document)
    document.queryCommandSupported = jest.fn(() => true)
}
// default single valid png used by most upload tests
const deaultFiles = [{ name: 'test.png', size: 512, mimeType: 'image/png' }]
// Turn plain descriptors into File mocks, dropping any that fail to build.
const createMockFilse = (fileList: any[] = deaultFiles) => {
    const files = fileList.map(file => mockFile(file))
    return files.filter(Boolean)
}
describe('upload img', () => {
    // mock img onload and onerror event
    beforeAll(() => {
        // Mocking Image.prototype.src to call the onload or onerror
        // callbacks depending on the src passed to it
        // @ts-ignore
        Object.defineProperty(global.Image.prototype, 'src', {
            // Define the property setter
            set(src) {
                if (src === errorUrl) {
                    // Call with setTimeout to simulate async loading
                    setTimeout(() => this.onerror(new Error('mocked error')))
                } else if (src === imgUrl) {
                    setTimeout(() => this.onload())
                }
            },
        })
    })

    // a fresh editor instance for every test case
    beforeEach(() => {
        editor = createEditor(document, `div${id++}`)
    })

    test('能够初始化基本的UploadImg类', () => {
        const uploadImg = new UploadImg(editor)

        expect(uploadImg.insertImg instanceof Function).toBeTruthy()
        expect(uploadImg.uploadImg instanceof Function).toBeTruthy()
    })

    // insertImg: direct insertion of an image URL via execCommand
    test('调用 insertImg 可以网编辑器里插入图片', () => {
        const uploadImg = new UploadImg(editor)

        mockSupportCommand()

        uploadImg.insertImg(imgUrl)

        expect(document.execCommand).toBeCalledWith(
            'insertHTML',
            false,
            `<img src="${imgUrl}" style="max-width:100%;"/>`
        )
    })

    test('调用 insertImg 可以网编辑器里插入图片,可以监听插入图片回调', () => {
        const callback = jest.fn()
        const uploadImg = createUploadImgInstance({
            linkImgCallback: callback,
        })

        mockSupportCommand()

        uploadImg.insertImg(imgUrl)

        expect(document.execCommand).toBeCalledWith(
            'insertHTML',
            false,
            `<img src="${imgUrl}" style="max-width:100%;"/>`
        )
        expect(callback).toBeCalledWith(imgUrl)
    })

    test('调用 insertImg 可以网编辑器里插入图片,插入图片加载失败可以通过customAlert配置错误提示', done => {
        expect.assertions(1)

        const alertFn = jest.fn()

        const uploadImg = createUploadImgInstance({ customAlert: alertFn })

        mockSupportCommand()

        uploadImg.insertImg(errorUrl)

        // wait for the mocked Image onerror (fired via setTimeout above)
        setTimeout(() => {
            expect(alertFn).toBeCalledWith(
                '插入图片错误',
                'error',
                `wangEditor: 插入图片错误,图片链接 "${errorUrl}",下载链接失败`
            )
            done()
        }, 1000)
    })

    // uploadImg: XHR-based upload paths
    test('调用 uploadImg 上传图片', done => {
        expect.assertions(1)

        const jestFn = jest.fn()

        const upload = createUploadImgInstance({
            uploadImgServer,
            uploadImgHooks: {
                success: jestFn,
            },
        })

        const files = createMockFilse()
        const mockXHRObject = mockXHRHttpRequest()

        upload.uploadImg(files)

        mockXHRObject.onreadystatechange()

        setTimeout(() => {
            expect(jestFn).toBeCalled()
            done()
        }, 1000)
    })

    test('调用 uploadImg 上传图片,如果传入的文件为空直接返回', () => {
        const upload = new UploadImg(editor)
        const res = upload.uploadImg([])
        expect(res).toBeUndefined()
    })

    test('调用 uploadImg 上传图片,如果没有配置customUploadImg, 则必须配置 uploadImgServer 或者 uploadImgShowBase64', () => {
        const upload = new UploadImg(editor)
        const files = createMockFilse()
        const res = upload.uploadImg(files)
        expect(res).toBeUndefined()
    })

    // validation failures: name/size, mime type, max size, max count
    test('调用 uploadImg 上传图片,如果文件没有名字或者size为,则会被过滤掉', () => {
        const fn = jest.fn()
        const upload = createUploadImgInstance({
            uploadImgServer,
            customAlert: fn,
        })
        const files = createMockFilse([{ name: '', size: 0, mimeType: 'image/png' }])
        const res = upload.uploadImg(files)
        expect(res).toBeUndefined()
        expect(fn).toBeCalledWith('传入的文件不合法', 'warning')
    })

    test('调用 uploadImg 上传图片,如果文件非图片,则返回并提示错误信息', () => {
        const fn = jest.fn()
        const upload = createUploadImgInstance({
            uploadImgServer,
            customAlert: fn,
        })
        const files = createMockFilse([{ name: 'test.txt', size: 200, mimeType: 'text/plain' }])
        const res = upload.uploadImg(files)
        expect(res).toBeUndefined()
        expect(fn).toBeCalledWith('图片验证未通过: \n【test.txt】不是图片', 'warning')
    })

    test('调用 uploadImg 上传图片,如果文件体积大小超过配置的大小,则返回并提示错误信息', () => {
        const fn = jest.fn()
        const upload = createUploadImgInstance({
            uploadImgServer,
            uploadImgMaxSize: 5 * 1024 * 1024,
            customAlert: fn,
        })
        const files = createMockFilse([
            { name: 'test.png', size: 6 * 1024 * 1024, mimeType: 'image/png' },
        ])
        const res = upload.uploadImg(files)
        expect(res).toBeUndefined()
        expect(fn).toBeCalledWith(`图片验证未通过: \n【test.png】大于 5M`, 'warning')
    })

    test('调用 uploadImg 上传图片,如果文件个数超过配置的的大小,则返回并提示错误信息', () => {
        const fn = jest.fn()
        const upload = createUploadImgInstance({
            uploadImgServer,
            uploadImgMaxLength: 2,
            customAlert: fn,
        })
        const files = createMockFilse([
            { name: 'test1.png', size: 2048, mimeType: 'image/png' },
            { name: 'test2.png', size: 2048, mimeType: 'image/png' },
            { name: 'test3.png', size: 2048, mimeType: 'image/png' },
        ])
        const res = upload.uploadImg(files)
        expect(res).toBeUndefined()
        expect(fn).toBeCalledWith('一次最多上传2张图片', 'warning')
    })

    test('调用 uploadImg 上传图片,如果配置了 customUploadImg 选项,则调用customUploadImg上传', () => {
        const fn = jest.fn()
        const upload = createUploadImgInstance({
            uploadImgServer,
            customUploadImg: fn,
        })
        const files = createMockFilse()
        const res = upload.uploadImg(files)
        expect(res).toBeUndefined()
        expect(fn).toBeCalled()
    })

    test('调用 uploadImg 上传图片,如果可以配置uploadImgParamsWithUrl添加query参数', done => {
        expect.assertions(1)

        const fn = jest.fn()

        const upload = createUploadImgInstance({
            uploadImgServer,
            uploadImgParams: {
                a: 'a',
                b: 'b',
            },
            uploadImgParamsWithUrl: true,
            uploadImgHooks: {
                success: fn,
            },
        })

        const files = createMockFilse()
        const mockXHRObject = mockXHRHttpRequest()

        upload.uploadImg(files)

        mockXHRObject.onreadystatechange()

        setTimeout(() => {
            expect(fn).toBeCalled()
            done()
        })
    })

    test('调用 uploadImg 上传图片,uploadImgServer支持hash参数拼接', done => {
        expect.assertions(1)

        const fn = jest.fn()

        const upload = createUploadImgInstance({
            uploadImgServer,
            uploadImgParams: {
                a: 'a',
                b: 'b',
            },
            uploadImgParamsWithUrl: true,
            uploadImgHooks: {
                success: fn,
            },
        })

        const files = createMockFilse([
            { name: 'test1.png', size: 2048, mimeType: 'image/png' },
            { name: 'test2.png', size: 2048, mimeType: 'image/png' },
        ])
        const mockXHRObject = mockXHRHttpRequest()

        upload.uploadImg(files)

        mockXHRObject.onreadystatechange()

        setTimeout(() => {
            expect(fn).toBeCalled()
            done()
        })
    })

    // server-error and hook paths: error / fail / customInsert / before / timeout
    test('调用 uploadImg 上传图片失败,会有错误提示,并支持配置onError hook', done => {
        expect.assertions(2)

        const fn = jest.fn()
        const alertFn = jest.fn()
        const upload = createUploadImgInstance({
            uploadImgServer,
            uploadImgHooks: {
                error: fn,
            },
            customAlert: alertFn,
        })
        const files = createMockFilse()
        const mockXHRObject = mockXHRHttpRequest({ status: 500 })

        upload.uploadImg(files)

        mockXHRObject.onreadystatechange()

        setTimeout(() => {
            expect(fn).toBeCalled()
            expect(alertFn).toBeCalledWith(
                '上传图片错误',
                'error',
                '上传图片错误,服务器返回状态: 500'
            )
            done()
        })
    })

    test('调用 uploadImg 上传图片成功后数据返回不正常,会有错误提示,并支持配置onFail hook', done => {
        expect.assertions(2)

        const fn = jest.fn()
        const alertFn = jest.fn()
        const upload = createUploadImgInstance({
            uploadImgServer,
            uploadImgHooks: {
                fail: fn,
            },
            customAlert: alertFn,
        })
        const files = createMockFilse()
        const mockXHRObject = mockXHRHttpRequest({
            status: 200,
            res: '{test: 123}',
        })

        upload.uploadImg(files)

        mockXHRObject.onreadystatechange()

        setTimeout(() => {
            expect(fn).toBeCalled()
            expect(alertFn).toBeCalledWith(
                '上传图片失败',
                'error',
                '上传图片返回结果错误,返回结果: {test: 123}'
            )
            done()
        })
    })

    test('调用 uploadImg 上传图片成功后,支持自定义插入图片函数', done => {
        expect.assertions(1)

        const insertFn = jest.fn()
        const upload = createUploadImgInstance({
            uploadImgServer,
            uploadImgHooks: {
                customInsert: insertFn,
            },
        })
        const files = createMockFilse()
        const mockXHRObject = mockXHRHttpRequest()

        upload.uploadImg(files)

        mockXHRObject.onreadystatechange()

        setTimeout(() => {
            expect(insertFn).toBeCalled()
            done()
        })
    })

    test('调用 uploadImg 上传被阻止,会有错误提示', done => {
        expect.assertions(2)

        const beforFn = jest.fn(() => ({ prevent: true, msg: '阻止发送请求' }))
        const alertFn = jest.fn()
        const upload = createUploadImgInstance({
            uploadImgServer,
            uploadImgHooks: {
                before: beforFn,
            },
            customAlert: alertFn,
        })
        const files = createMockFilse()
        const mockXHRObject = mockXHRHttpRequest()

        upload.uploadImg(files)

        mockXHRObject.onreadystatechange()

        setTimeout(() => {
            expect(beforFn).toBeCalled()
            expect(alertFn).toBeCalledWith('阻止发送请求', 'error')
            done()
        })
    })

    test('调用 uploadImg 上传返回的错误码不符合条件会有错误提示,并触发fail回调', done => {
        expect.assertions(2)

        const failFn = jest.fn()
        const alertFn = jest.fn()
        const upload = createUploadImgInstance({
            uploadImgServer,
            uploadImgHooks: {
                fail: failFn,
            },
            customAlert: alertFn,
        })
        const files = createMockFilse()
        const mockXHRObject = mockXHRHttpRequest({
            status: 200,
            res: { test: 123, errno: -1 },
        })

        upload.uploadImg(files)

        mockXHRObject.onreadystatechange()

        setTimeout(() => {
            expect(failFn).toBeCalled()
            expect(alertFn).toBeCalledWith(
                '上传图片失败',
                'error',
                '上传图片返回结果错误,返回结果 errno=-1'
            )
            done()
        })
    })

    test('调用 uploadImg 上传,如果配置 uploadImgShowBase64 参数,则直接插入base64到编辑器', () => {
        const callback = jest.fn()
        const upload = createUploadImgInstance({
            uploadImgShowBase64: true,
            linkImgCallback: callback,
        })
        const files = createMockFilse()
        const mockFn = jest.fn()
        // @ts-ignore
        jest.spyOn(global, 'FileReader').mockImplementation(() => {
            return {
                readAsDataURL: mockFn,
            }
        })

        upload.uploadImg(files)

        expect(mockFn).toBeCalled()
    })

    test('调用 uploadImg 上传超时会触发超时回调', done => {
        expect.assertions(2)

        const timeoutFn = jest.fn()
        const alertFn = jest.fn()

        const upload = createUploadImgInstance({
            uploadImgServer,
            uploadImgHooks: {
                timeout: timeoutFn,
            },
            customAlert: alertFn,
        })
        const files = createMockFilse()
        const mockXHRObject = mockXHRHttpRequest()

        upload.uploadImg(files)

        mockXHRObject.ontimeout()

        setTimeout(() => {
            expect(timeoutFn).toBeCalled()
            expect(alertFn).toBeCalledWith('上传图片超时', 'error')
            done()
        })
    })
})
|
Payload analysis of anonymous communication system with host-based rerouting mechanism Host-based rerouting is a routing scheme that stores and forwards data in the application layer. With it, users can communicate in an indirect way, so identity information such as IP addresses can be effectively hidden against eavesdroppers. Anonymous communication systems such as mixes, onion routing, and crowds adopt this mechanism to provide anonymity. This mechanism, however, can introduce extra performance overhead, such as communication delay and participant payload, which may limit the applications of anonymous communication systems. In this paper, we quantitatively study the participant payload induced by host-based rerouting mechanisms. A probability formula for calculating the participant payload is derived, which shows that the participant payload is determined by the number of participants, the number of rerouting paths, and the probability distribution of the length of the rerouting paths. Applying this formula to the practical anonymous communication system crowds, we immediately obtain the precise expected participant payload, which significantly improves Reiter and Rubin's original analysis and demonstrates that the participant payload in crowds remains constant, independent of variations in the number of participants. Simulation results are presented to validate our theoretical analysis.
// *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package datacatalog
import (
"reflect"
"github.com/pkg/errors"
"github.com/pulumi/pulumi/sdk/v2/go/pulumi"
)
// Three different resources help you manage your IAM policy for Data catalog EntryGroup. Each of these resources serves a different use case:
//
// * `datacatalog.EntryGroupIamPolicy`: Authoritative. Sets the IAM policy for the entrygroup and replaces any existing policy already attached.
// * `datacatalog.EntryGroupIamBinding`: Authoritative for a given role. Updates the IAM policy to grant a role to a list of members. Other roles within the IAM policy for the entrygroup are preserved.
// * `datacatalog.EntryGroupIamMember`: Non-authoritative. Updates the IAM policy to grant a role to a new member. Other members for the role for the entrygroup are preserved.
//
// > **Note:** `datacatalog.EntryGroupIamPolicy` **cannot** be used in conjunction with `datacatalog.EntryGroupIamBinding` and `datacatalog.EntryGroupIamMember` or they will fight over what your policy should be.
//
// > **Note:** `datacatalog.EntryGroupIamBinding` resources **can be** used in conjunction with `datacatalog.EntryGroupIamMember` resources **only if** they do not grant privilege to the same role.
type EntryGroupIamPolicy struct {
	pulumi.CustomResourceState

	// Used to find the parent resource to bind the IAM policy to
	EntryGroup pulumi.StringOutput `pulumi:"entryGroup"`
	// (Computed) The etag of the IAM policy.
	Etag pulumi.StringOutput `pulumi:"etag"`
	// The policy data generated by
	// a `organizations.getIAMPolicy` data source.
	PolicyData pulumi.StringOutput `pulumi:"policyData"`
	// The ID of the project in which the resource belongs.
	// If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
	Project pulumi.StringOutput `pulumi:"project"`
	// Region of the entry group — presumably defaulted by the provider when
	// unset; confirm against the upstream provider schema.
	Region pulumi.StringOutput `pulumi:"region"`
}
// NewEntryGroupIamPolicy registers a new resource with the given unique name, arguments, and options.
func NewEntryGroupIamPolicy(ctx *pulumi.Context,
	name string, args *EntryGroupIamPolicyArgs, opts ...pulumi.ResourceOption) (*EntryGroupIamPolicy, error) {
	// Validate required arguments up front; nil args implies both are missing.
	if args == nil || args.EntryGroup == nil {
		return nil, errors.New("missing required argument 'EntryGroup'")
	}
	if args == nil || args.PolicyData == nil {
		return nil, errors.New("missing required argument 'PolicyData'")
	}
	// NOTE(review): unreachable — a nil args already returned above. Kept as
	// emitted by the code generator (see the "do not edit" file header).
	if args == nil {
		args = &EntryGroupIamPolicyArgs{}
	}
	var resource EntryGroupIamPolicy
	err := ctx.RegisterResource("gcp:datacatalog/entryGroupIamPolicy:EntryGroupIamPolicy", name, args, &resource, opts...)
	if err != nil {
		return nil, err
	}
	return &resource, nil
}
// GetEntryGroupIamPolicy gets an existing EntryGroupIamPolicy resource's state with the given name, ID, and optional
// state properties that are used to uniquely qualify the lookup (nil if not required).
func GetEntryGroupIamPolicy(ctx *pulumi.Context,
	name string, id pulumi.IDInput, state *EntryGroupIamPolicyState, opts ...pulumi.ResourceOption) (*EntryGroupIamPolicy, error) {
	var resource EntryGroupIamPolicy
	// Reads existing state by ID rather than registering a new resource.
	err := ctx.ReadResource("gcp:datacatalog/entryGroupIamPolicy:EntryGroupIamPolicy", name, id, state, &resource, opts...)
	if err != nil {
		return nil, err
	}
	return &resource, nil
}
// Input properties used for looking up and filtering EntryGroupIamPolicy resources.
type entryGroupIamPolicyState struct {
	// Used to find the parent resource to bind the IAM policy to
	EntryGroup *string `pulumi:"entryGroup"`
	// (Computed) The etag of the IAM policy.
	Etag *string `pulumi:"etag"`
	// The policy data generated by
	// a `organizations.getIAMPolicy` data source.
	PolicyData *string `pulumi:"policyData"`
	// The ID of the project in which the resource belongs.
	// If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
	Project *string `pulumi:"project"`
	// Region of the entry group — presumably defaulted by the provider when
	// unset; confirm against the upstream provider schema.
	Region *string `pulumi:"region"`
}
// EntryGroupIamPolicyState is the typed-input mirror of entryGroupIamPolicyState.
type EntryGroupIamPolicyState struct {
	// Used to find the parent resource to bind the IAM policy to
	EntryGroup pulumi.StringPtrInput
	// (Computed) The etag of the IAM policy.
	Etag pulumi.StringPtrInput
	// The policy data generated by
	// a `organizations.getIAMPolicy` data source.
	PolicyData pulumi.StringPtrInput
	// The ID of the project in which the resource belongs.
	// If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
	Project pulumi.StringPtrInput
	Region pulumi.StringPtrInput
}
// ElementType reports the plain-struct type the Pulumi SDK marshals this state into.
func (EntryGroupIamPolicyState) ElementType() reflect.Type {
	return reflect.TypeOf((*entryGroupIamPolicyState)(nil)).Elem()
}
// entryGroupIamPolicyArgs is the plain-struct form of the constructor arguments.
type entryGroupIamPolicyArgs struct {
	// Used to find the parent resource to bind the IAM policy to
	EntryGroup string `pulumi:"entryGroup"`
	// The policy data generated by
	// a `organizations.getIAMPolicy` data source.
	PolicyData string `pulumi:"policyData"`
	// The ID of the project in which the resource belongs.
	// If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
	Project *string `pulumi:"project"`
	Region *string `pulumi:"region"`
}
// The set of arguments for constructing a EntryGroupIamPolicy resource.
type EntryGroupIamPolicyArgs struct {
	// Used to find the parent resource to bind the IAM policy to
	EntryGroup pulumi.StringInput
	// The policy data generated by
	// a `organizations.getIAMPolicy` data source.
	PolicyData pulumi.StringInput
	// The ID of the project in which the resource belongs.
	// If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
	Project pulumi.StringPtrInput
	Region pulumi.StringPtrInput
}
// ElementType reports the plain-struct type the Pulumi SDK marshals these args into.
func (EntryGroupIamPolicyArgs) ElementType() reflect.Type {
	return reflect.TypeOf((*entryGroupIamPolicyArgs)(nil)).Elem()
}
|
/**
 * Read S0 parameter block from persistent memory.
 *
 * The output is first reset to a default-constructed block, so callers
 * always receive defined data even for an out-of-range index.
 *
 * @param[in] index Index of S0 parameter block
 * @param[out] s0Data Parameter block
 */
void readS0Data(uint8_t index, S0Data& s0Data)
{
    /* Start from defaults; overwritten below only for valid indices. */
    S0Data defaults;
    s0Data = defaults;

    /* Only indices inside the configured range are backed by EEPROM. */
    if (index < CONFIG_S0_SMARTMETER_MAX_NUM)
    {
        EEPROM.get(PSMEMORY_S0DATA_ADDR + index * sizeof(S0Data), s0Data);
    }
}
<filename>test/libp2p/crypto/aes_test.cpp
/**
* Copyright Soramitsu Co., Ltd. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0
*/
#include "libp2p/crypto/aes_ctr/aes_ctr_impl.hpp"
#include <gtest/gtest.h>
#include <libp2p/common/literals.hpp>
#include <libp2p/common/types.hpp>
#include <libp2p/crypto/common.hpp>
#include <libp2p/outcome/outcome.hpp>
using namespace libp2p::crypto;
using namespace libp2p::common;
class AesTest : public testing::Test {
 protected:
  using Aes128Secret = libp2p::crypto::common::Aes128Secret;
  using Aes256Secret = libp2p::crypto::common::Aes256Secret;

  // Fill the plaintext buffers with the fixed reference messages that the
  // expected ciphertexts below were generated from.
  void SetUp() override {
    std::string_view msg1 = "Single block msg";
    plain_text_128.insert(plain_text_128.end(), msg1.begin(), msg1.end());

    std::string_view msg2 = "The fly got to the jam that's all the poem";
    plain_text_256.insert(plain_text_256.end(), msg2.begin(), msg2.end());
  }

  // Shared IV used for both key sizes.
  ByteArray iv{"3dafba429d9eb430b422da802c9fac41"_unhex};
  // 128-bit and 256-bit test keys.
  ByteArray key_128{"06a9214036b8a15b512e03d534120006"_unhex};
  ByteArray key_256{
      "78dae34bc0eba813c09cec5c871f3ccb39dcbbe04a2fe1837e169fee896aa208"_unhex};
  // Reference ciphertexts matching (plain_text_128, key_128, iv) and
  // (plain_text_256, key_256, iv) respectively.
  ByteArray cipher_text_128{"d43130f652c4c81be62fdf5e72e48cbc"_unhex};
  ByteArray cipher_text_256{
      "586a49b4ba0336ffe130c5f27b80d3c9910d7f422687a60b1b833cff3d9ecbe03e4db5653a671fb1a7b2"_unhex};

  ByteArray plain_text_128;
  ByteArray plain_text_256;
};
/**
 * @given key, iv, plain text and encrypted text
 * @when encrypt aes-128-ctr is applied
 * @then result matches encrypted text
 */
TEST_F(AesTest, EncodeAesCtr128Success) {
  // Copy key and IV into the secret's fixed-size arrays.
  Aes128Secret secret{};
  std::copy(key_128.begin(), key_128.end(), secret.key.begin());
  std::copy(iv.begin(), iv.end(), secret.iv.begin());

  auto &&result = aes::AesCtrImpl(secret, aes::AesCtrImpl::Mode::ENCRYPT)
                      .crypt(plain_text_128);
  ASSERT_TRUE(result);  // outcome holds a value, not an error
  ASSERT_EQ(result.value(), cipher_text_128);
}
/**
 * @given key, iv, plain text and encrypted text
 * @when encrypt aes-256-ctr is applied
 * @then result matches encrypted text
 */
TEST_F(AesTest, EncodeAesCtr256Success) {
  // Copy key and IV into the secret's fixed-size arrays.
  Aes256Secret secret{};
  std::copy(key_256.begin(), key_256.end(), secret.key.begin());
  std::copy(iv.begin(), iv.end(), secret.iv.begin());

  auto &&result = aes::AesCtrImpl(secret, aes::AesCtrImpl::Mode::ENCRYPT)
                      .crypt(plain_text_256);
  ASSERT_TRUE(result);  // outcome holds a value, not an error
  ASSERT_EQ(result.value(), cipher_text_256);
}
/**
 * @given key, iv, plain text and encrypted text
 * @when decrypt aes-128-ctr is applied
 * @then result matches plain text
 */
TEST_F(AesTest, DecodeAesCtr128Success) {
  // Same secret as the encrypt test; CTR decryption mirrors encryption.
  Aes128Secret secret{};
  std::copy(key_128.begin(), key_128.end(), secret.key.begin());
  std::copy(iv.begin(), iv.end(), secret.iv.begin());

  auto &&result = aes::AesCtrImpl(secret, aes::AesCtrImpl::Mode::DECRYPT)
                      .crypt(cipher_text_128);
  ASSERT_TRUE(result);  // outcome holds a value, not an error
  ASSERT_EQ(result.value(), plain_text_128);
}
/**
 * @given key, iv, plain text and encrypted text
 * @when decrypt aes-256-ctr is applied
 * @then result matches plain text
 */
TEST_F(AesTest, DecodeAesCtr256Success) {
  // Same secret as the encrypt test; CTR decryption mirrors encryption.
  Aes256Secret secret{};
  std::copy(key_256.begin(), key_256.end(), secret.key.begin());
  std::copy(iv.begin(), iv.end(), secret.iv.begin());

  auto &&result = aes::AesCtrImpl(secret, aes::AesCtrImpl::Mode::DECRYPT)
                      .crypt(cipher_text_256);
  ASSERT_TRUE(result);  // outcome holds a value, not an error
  ASSERT_EQ(result.value(), plain_text_256);
}
/**
 * @given two identical encrypted streams
 * @when one stream is decrypted at once and the second in two approaches
 * @then the resulting decrypted data is equal and valid
 */
TEST_F(AesTest, Stream) {
  Aes256Secret secret{};
  std::copy(key_256.begin(), key_256.end(), secret.key.begin());
  std::copy(iv.begin(), iv.end(), secret.iv.begin());

  // Split the ciphertext into two chunks at an arbitrary offset; 20 is not
  // a multiple of the 16-byte AES block, which exercises mid-block state.
  auto cipher_text = gsl::make_span(cipher_text_256);
  const auto kDelimiter = 20;
  auto cipher_text_part_1 = cipher_text.subspan(0, kDelimiter);
  auto cipher_text_part_2 =
      cipher_text.subspan(kDelimiter, cipher_text.size() - kDelimiter);

  // Reference: decrypt the whole buffer in one call.
  auto &&result_ref = aes::AesCtrImpl(secret, aes::AesCtrImpl::Mode::DECRYPT)
                          .crypt(cipher_text_256);
  ASSERT_TRUE(result_ref);
  ASSERT_EQ(result_ref.value(), plain_text_256);

  // Streaming: decrypt the same data in two calls on one cipher instance.
  aes::AesCtrImpl ctr(secret, aes::AesCtrImpl::Mode::DECRYPT);
  auto &&result_part_1 = ctr.crypt(cipher_text_part_1);
  auto &&result_part_2 = ctr.crypt(cipher_text_part_2);
  ASSERT_TRUE(result_part_1);
  ASSERT_TRUE(result_part_2);
  ASSERT_EQ(plain_text_256.size(),
            result_part_1.value().size() + result_part_2.value().size());

  // Concatenated chunk outputs must equal the one-shot plaintext.
  ByteArray out;
  out.insert(out.end(), result_part_1.value().begin(),
             result_part_1.value().end());
  out.insert(out.end(), result_part_2.value().begin(),
             result_part_2.value().end());
  ASSERT_EQ(plain_text_256, out);
}
|
/*******************************************************************************
* Copyright (c) 2016 <EMAIL> (<EMAIL>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package io.silverspoon.bulldog.beagleboneblack;
import io.silverspoon.bulldog.core.pin.Pin;
/**
 * A pin on the BeagleBone Black, identified both by its header name and by
 * its AM335x SoC-level name, GPIO bank and index within that bank.
 */
public class BeagleBonePin extends Pin {

   private final int bank;
   private final int pinIndex;
   private final String am335xName;

   /**
    * Creates a BeagleBone pin.
    *
    * @param name        header-level pin name
    * @param am335xName  AM335x SoC pin name
    * @param bank        GPIO bank number (global address is 32 * bank + pinIndex)
    * @param pinIndex    index of the pin within its bank
    * @param port        port identifier, e.g. "P8" (letter followed by a number)
    * @param indexOnPort index of the pin on its port
    */
   public BeagleBonePin(String name, String am335xName, int bank, int pinIndex, String port, int indexOnPort) {
      super(name, 32 * bank + pinIndex, port, indexOnPort);
      this.am335xName = am335xName;
      // Bug fix: these two fields were never assigned before, so
      // getBank() and getPinIndex() always returned 0.
      this.bank = bank;
      this.pinIndex = pinIndex;
   }

   /** @return the GPIO bank this pin belongs to */
   public int getBank() {
      return bank;
   }

   /** @return the index of this pin within its GPIO bank */
   public int getPinIndex() {
      return pinIndex;
   }

   /** @return the numeric part of the port name, e.g. 8 for "P8" */
   public int getPortNumeric() {
      return Integer.parseInt(getPort().substring(1));
   }

   /** @return the AM335x SoC-level name of this pin */
   public String getAm335xName() {
      return am335xName;
   }
}
|
//
// HYTestService.h
// kiwi
//
// Created by pengfeihuang on 16/12/9.
// Copyright © 2016年 YY Inc. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "HYServiceCenter.h"
/// Basic test service protocol/implementation pair resolved via HYServiceCenter.
@protocol IHYTestService <IWFService>
- (void)foo;
@end
@interface HYTestService : NSObject<IHYTestService>
@end
/// Test service variant — name suggests annotation-based registration
/// ("Anontion" looks like a misspelling of "Annotation"); confirm against
/// HYServiceCenter's registration macros.
@protocol IHYTestAnontionService <IWFService>
- (void)foo;
@end
@interface HYAnontionService : NSObject<IHYTestAnontionService>
@end
/// Test service variant — name suggests plist-based registration; confirm.
@protocol IHYTestPlistService <IWFService>
- (void)hello;
@end
@interface HYPlistService : NSObject<IHYTestPlistService>
@end
|
<reponame>soerenkruck/AclyNet<filename>Core/src/com/doerte/base/Server.java<gh_stars>0
package com.doerte.base;
/**
 * Placeholder server class; currently declares no state or behavior.
 */
public class Server {
}
|
import * as child_process from 'child_process';
import * as fs from 'fs';
import * as path from 'path';
import * as archiver from 'archiver';
import * as StreamZip from 'node-stream-zip';
/**
 * Recursively copy folder from src to dest.
 *
 * Directory creation and per-file copies are performed synchronously; the
 * returned promise rejects with the first error encountered, otherwise
 * resolves to true.
 *
 * @param source source folder
 * @param destination destination folder
 */
export async function copyFolder(source: string, destination: string): Promise<boolean> {
    // read contents of source directory
    const entries : string[] = fs.readdirSync(source);

    // synchronously create destination if it doesn't exist to ensure
    // its existence before we copy individual items into it
    if (!fs.existsSync(destination)) {
        try {
            fs.mkdirSync(destination);
        } catch (err) {
            return Promise.reject(err);
        }
    } else if (!fs.lstatSync(destination).isDirectory()) {
        return Promise.reject(new Error("Unable to create directory '" + destination + "': already exists as file."));
    }

    let promises : Promise<boolean>[] = [];
    for(let entry of entries) {
        // full path of src/dest
        const srcPath = path.join(source,entry);
        const destPath = path.join(destination,entry);

        // if directory, recursively copy, otherwise copy file
        if(fs.lstatSync(srcPath).isDirectory()) {
            promises.push(copyFolder(srcPath, destPath));
        } else {
            try {
                fs.copyFileSync(srcPath, destPath);
            } catch (err) {
                // collect the failure; Promise.all below surfaces the first one
                promises.push(Promise.reject(err));
            }
        }
    }
    // NOTE(review): if several copies fail, only the first rejection reason
    // propagates (Promise.all semantics); the remaining files are still
    // attempted because copying continues before the await.
    await Promise.all(promises).then(
        (value: boolean[] ) => {
            return value;
        },
        (reason: any) => {
            console.log(reason);
            return Promise.reject(reason);
        }
    );
    return Promise.resolve(true);
}
/**
 * Copy a single file from `path` to `destination`.
 *
 * Bug fix: the previous implementation returned immediately and used
 * `return Promise.reject(...)` *inside* the fs callback, whose return value
 * fs ignores — so the returned promise always resolved before the copy
 * finished and errors were silently lost. Wrapping the callback API in a
 * Promise propagates completion and failure correctly.
 *
 * @param path source file path
 * @param destination destination file path
 */
export async function copyItem(path: string, destination: string): Promise<void> {
    return new Promise<void>((resolve, reject) => {
        fs.copyFile(path, destination, (error) => {
            if (error) {
                reject(error);
            } else {
                resolve();
            }
        });
    });
}
/**
 * Synchronously copy a single file from `path` to `destination`.
 * Throws on failure (propagated from fs.copyFileSync).
 */
export function copyItemSync(path:string, destination:string): void {
    fs.copyFileSync(path, destination);
}
/** @returns true if a file or directory exists at `path`. */
export function exists(path:string): boolean {
    return fs.existsSync(path);
}
/**
 * Stat a path without throwing.
 * @param item path to inspect
 * @returns lstat result (symlinks reported as links, not their targets),
 *          or null when the path does not exist
 */
export function stats(item: string): fs.Stats | null {
    return fs.existsSync(item) ? fs.lstatSync(item) : null;
}
/**
 * Asynchronously list all files under `item`, optionally filtered.
 * @param item root directory
 * @param predicate optional filter applied to each file path
 * @returns promise of the matching file paths
 */
export function walk(item: string, predicate?: (item: string) => boolean): Promise<any[] | null> {
    return new Promise((resolve, reject) =>
        _walk(item, predicate, (error, result) => (error ? reject(error) : resolve(result)))
    );
}
/**
 * Synchronously list every file (not directory) under `item`, recursing
 * into subdirectories.
 * @param item root directory
 * @returns full paths of all files found
 */
export function walkSync(item: string): string[] {
    let results: string[] = [];
    for (const entry of fs.readdirSync(item)) {
        const fullPath = path.join(item, entry);
        if (fs.lstatSync(fullPath).isDirectory()) {
            results = results.concat(walkSync(fullPath));
        } else {
            results.push(fullPath);
        }
    }
    return results;
}
/**
 * Recursively apply a function on a pair of files or directories from source to dest.
 *
 * Traversal is depth-first and pre-order: `func` runs on a node before its
 * children; a false return from `func` (or from any child) stops the walk.
 *
 * @param source source file or folder
 * @param destination destination file or folder
 * @param func function to apply between src and dest
 * @return if recursion should continue
 * @throws Error if function fails
 */
export async function recurse(source: string, destination: string, func: (src: string, dest: string) => Promise<boolean>): Promise<boolean> {
    // apply function between src/dest; false aborts the whole recursion
    let success = await func(source, destination);
    if (!success) {
        return false;
    }

    if (fs.lstatSync(source).isDirectory()) {
        // read contents of source directory and iterate
        const entries: string[] = fs.readdirSync(source);
        for (let entry of entries) {
            // full path of src/dest
            const srcPath = path.join(source, entry);
            const destPath = path.join(destination, entry);

            // if directory, recursively copy, otherwise copy file
            success = await recurse(srcPath, destPath, func);
            if (!success) {
                return false;
            }
        }
    }

    return true;
}
// Callback-based recursive directory walk used by walk().
// Tracks outstanding fs.stat/recursive calls with a `pending` counter and
// invokes `done` exactly once, when the counter reaches zero.
async function _walk(dir: string, predicate?:(item:string) => boolean, done?: (error: any, result: any[] | null) => void) {
    let results: any[] = [];
    // apply the optional filter just before handing results back
    const applyPredicate = (results:any[]) => {
        if (results && predicate) {
            results = results.filter(predicate);
        }
        return results;
    };
    fs.readdir(dir, (err, list) => {
        if (err) { return done ? done(err, null) : undefined; }
        let pending = list.length;
        // empty directory: finish immediately
        if (!pending) { return done ? done(null, results) : undefined; }
        if (list && list.length > 0) {
            list.forEach(file => {
                file = path.resolve(dir, file);
                fs.stat(file, (err, stat) => {
                    if (stat && stat.isDirectory()) {
                        // recurse; merge child results when the subtree completes
                        _walk(file, predicate, (err, res) => {
                            results = results.concat(res);
                            if (!--pending && done) { done(null, applyPredicate(results)); }
                        });
                    } else {
                        results.push(file);
                        if (!--pending && done) { done(null, applyPredicate(results)); }
                    }
                });
            });
        }
    });
}
/**
 * Delete a single file if it exists; resolves quietly when it does not.
 *
 * Bug fix: the previous implementation used `return Promise.reject(...)`
 * inside the fs.unlink callback, whose return value fs ignores — so the
 * outer promise resolved before the deletion finished and errors were
 * silently lost. Wrapping the callback API in a Promise fixes both.
 *
 * @param path file to delete
 */
export async function deleteItem(path: string): Promise<void> {
    return new Promise<void>((resolve, reject) => {
        if (!fs.existsSync(path)) {
            resolve();
            return;
        }
        fs.unlink(path, (error) => {
            if (error) {
                reject(error);
            } else {
                resolve();
            }
        });
    });
}
/**
 * Recursively delete a directory and all contained contents
 * @param folder directory to delete
 * @returns true when the folder existed and was removed, false when the
 *          path is missing or not a directory; rejects on the first
 *          deletion failure
 */
export async function deleteFolder(folder: string): Promise<boolean> {
    if (fs.existsSync(folder) && fs.lstatSync(folder).isDirectory()) {
        // delete children first: recurse into subfolders, unlink files
        let promises = fs.readdirSync(folder).map(
            (entry:string) => {
                let fn = path.join(folder, entry);
                if (fs.lstatSync(fn).isDirectory()) {
                    return deleteFolder(fn);
                } else {
                    try {
                        fs.unlinkSync(fn);
                    } catch (err) {
                        console.error("Failed to delete '" + fn + "':" + err);
                        return Promise.reject(err);
                    }
                    return Promise.resolve(true);
                }
            }
        );
        // wait for all promises
        await Promise.all(promises).then(
            (value: boolean[] ) => {
                return value;
            },
            (reason: any) => {
                console.log(reason);
                return Promise.reject(reason);
            }
        );
        // remove directory (only reached once it is empty)
        try {
            fs.rmdirSync(folder);
        } catch(err) {
            console.error("Failed to remove directory '" + folder + "': " + err);
            return Promise.reject(err);
        }
        return Promise.resolve(true);
    }
    return Promise.resolve(false);
}
/**
 * Recursively create a directory and any missing parents (like `mkdir -p`).
 *
 * Bug fix: the empty-path guard used to read `if (!path)`, which tested the
 * imported `path` *module* (always truthy) instead of the destination
 * string, so an empty destination was never rejected and eventually made
 * fs.mkdirSync throw.
 *
 * @param destination destination path
 * @param mode optional permission mode forwarded to fs.mkdirSync
 * @returns true when the directory exists or was created; false when the
 *          path is empty or already exists as a non-directory
 */
export function makeFolderSync(destination: string, mode: string | number | null | undefined = undefined): boolean {
    // empty path can never be created
    if (!destination) {
        return false;
    }
    // already present: succeed only if it really is a directory
    if (fs.existsSync(destination)) {
        return fs.lstatSync(destination).isDirectory();
    }
    // ensure existence of the parent first; stop when dirname no longer
    // shrinks the path (filesystem root)
    const parent = path.dirname(destination);
    if (parent !== destination && !makeFolderSync(parent, mode)) {
        return false;
    }
    // make current directory
    fs.mkdirSync(destination, mode);
    return true;
}
/**
 * Read a file synchronously, defaulting to UTF-8 text when no options are
 * supplied.
 *
 * @param source path of the file to read
 * @param options optional encoding name or fs read options
 * @returns the file contents (a string under the default encoding)
 */
export function readFileSync(source: string, options?: string | { encoding: string; flag?:string; }): any {
    const readOptions = options ? options : 'utf8';
    return fs.readFileSync(source, readOptions);
}
/**
 * Write data to a file synchronously, defaulting to UTF-8 encoding when no
 * options are supplied.
 *
 * @param destination path of the file to write
 * @param data contents to write
 * @param options optional encoding name or fs write options
 */
export function writeFileSync(destination: string, data: any, options?: { encoding?: string | null; mode?: number | string; flag?: string; } | string | null): void {
    const writeOptions = options ? options : 'utf8';
    fs.writeFileSync(destination, data, writeOptions);
}
/**
 * Extract a zip archive into a destination directory (created if missing).
 *
 * BUG FIXES: (1) the original called `resolve(count)` even after `reject(err)`
 * because there was no `return`/`else` after the rejection; (2) no `error`
 * listener was registered, so a corrupt/unreadable archive never fired
 * `ready` and the returned promise hung forever.
 *
 * @param archive path of the zip file to extract
 * @param destination directory to extract into
 * @returns the number of extracted entries
 */
export async function unzip(archive:string, destination:string): Promise<number> {
    const zip = new StreamZip({
        file: archive,
        storeEntries: true
    });
    return new Promise((resolve, reject) => {
        // surface open/read failures instead of hanging
        zip.on('error', err => reject(err));
        zip.on('ready', () => {
            makeFolderSync(destination);
            zip.extract(null, destination, (err, count) => {
                zip.close();
                if (err) {
                    reject(err);
                } else {
                    resolve(count);
                }
            });
        });
    });
}
/**
 * Create a zip archive from a list of files.
 *
 * Non-existent items are skipped silently. When `rootPath` is given, entry
 * names are paths relative to it; otherwise each entry is stored under its
 * base name.
 *
 * @param out path of the archive to create
 * @param items files to add
 * @param rootPath optional base directory for relative entry names
 */
export async function zip(out:string, items:string[], rootPath?:string): Promise<void> {
    const archive = archiver('zip', { zlib: { level: 9 }});
    const stream = fs.createWriteStream(out);
    return new Promise((resolve, reject) => {
        for (const item of items) {
            if (!fs.existsSync(item)) {
                continue;
            }
            const entryName = rootPath ? path.relative(rootPath, item) : path.basename(item);
            archive.file(item, { name: entryName });
        }
        stream.on('close', () => resolve());
        archive
            .on('error', err => reject(err))
            .pipe(stream);
        archive.finalize();
    });
}
/**
 * Zip an entire directory tree into a single archive.
 *
 * BUG FIX: when `out` had no extension, the archive name was derived from
 * `path.dirname(source)` — the *parent's full path* — which, joined onto
 * `out`, produced a bogus nested target. The folder's own name
 * (`path.basename`) is the intended archive name.
 *
 * @param out target archive path, or a directory (no extension) in which
 *            `<source-folder-name>.zip` is created
 * @param source directory to archive
 * @param subfolderName name of the top-level folder inside the archive, or
 *                      false to place contents at the archive root
 */
export async function zipFolder(out:string, source:string, subfolderName:string | false = false): Promise<void> {
    if (path.extname(out) === "") {
        out = path.join(out, `${path.basename(source)}.zip`);
    }
    const archive = archiver('zip', { zlib: { level: 9 }});
    const stream = fs.createWriteStream(out);
    return new Promise((resolve, reject) => {
        archive
            .directory(source, subfolderName)
            .on('error', err => reject(err))
            .pipe(stream);
        stream.on('close', () => resolve());
        archive.finalize();
    });
}
/**
* Helper funcion to open a folder in the user's file manager
* @export
* @param {string} folder folder to open
*/
export function openFolderInExplorer(folder: string) {
let command = "";
switch (process.platform) {
case 'linux':
command = 'xdg-open';
break;
case 'darwin':
command = 'open';
break;
case 'win32':
command = 'explorer.exe';
break;
}
// executute open folder command
if (command) {
child_process.spawn(command, [folder]);
}
} |
SO THAT'S WHERE THEY WENT!
To save money when it was low on funds back in 1996, Australia's University of Western Sydney rid itself of 10,000 surplus books not by putting them in storage, but by burial in a trench next to the athletic fields. Now that the deed and volumes have been exposed, no one appears willing to admit responsibility. And, not surprisingly, their further usefulness is zero. Said a school official: "They are not in terrific shape."
In Ipoh, Malaysia, two brothers early one morning last week heard noises in another part of their house that shouldn't have been occurring. Through a window, they saw a burglar running away as though he'd been disturbed before swiping everything he wanted. End of story? Not quite. Within minutes, he was back, asking for his car keys, left behind in haste, and pleading with the brothers not to call police. When they said no, he took off again, but not before removing the license plates and tax sticker from the vehicle. The cops planned to try tracing him through the serial number on the chassis.
The New Economy: With gas prices low, what are Americans spending on instead?
Letter to the Editor: First Results from ISO We analyze the new mid-infrared maps of NGC 6946 for variations in the color ratio of the 7-to-15 μm emission. Our preliminary findings are that this mid-infrared color is remarkably constant between arms and inter-arm regions, and as a function of radius in the disk, excluding the nuclear region. As surface brightness ranges by more than an order of magnitude and the radius runs from about 0.5 to 3 kpc, the color ratio remains constant to about 20%. Our interpretation is that hard UV radiation from OB stars does not dominate the heating of the grains radiating in the mid-infrared; and that surface brightness variations are driven primarily by surface-filling fraction in the disk, and by radiation intensity increases in starburst environments, such as the nucleus of NGC 6946.
def map_to_png(m: folium.Map, path: str) -> None:
    """Render a folium map to a PNG file via a Chrome screenshot.

    The map is first saved as an HTML page in the temporary reports
    directory, then loaded in a Chrome instance whose viewport screenshot
    is written to ``path``.

    BUG FIX: the original never called ``driver.quit()``, leaking a Chrome
    process per invocation (and on any failure mid-way). The driver is now
    released in a ``finally`` block.

    :param m: folium map to render
    :param path: destination path for the PNG screenshot
    """
    html_file = os.path.join(TMP_REPORTS_DIRECOTRY, 'map.html')
    m.save(html_file)
    options = webdriver.ChromeOptions()
    options.binary_location = CHROME_PATH
    chrome_driver_binary = CHROME_DRIVER_PATH
    driver = webdriver.Chrome(chrome_driver_binary, options=options)
    try:
        driver.set_window_size(1000, 1000)
        driver.get(html_file)
        # crude fixed wait for map tiles to load before the screenshot;
        # NOTE(review): an explicit wait condition would be more reliable
        time.sleep(1)
        driver.save_screenshot(path)
    finally:
        driver.quit()
ObamaCare has caused hard-to-quantify economic damage, but some of the law’s regulations may be lethal—literally. Consider a Medicare hospital payment initiative, which a new study in the Journal of the American Medical Association Cardiology suggests may have contributed to an increase in deaths.
Readers are likely familiar with ObamaCare’s mandate and subsidies to impel individuals to obtain health insurance. But the law also included monetary incentives and penalties aimed at inducing changes in health-care delivery and spending reductions. The government rolled out these payment models nationally without careful study, and they are having unintended side effects.
A case in point is the Hospital Readmissions Reduction Program, which penalizes hospitals with above-average readmissions for Medicare patients. Readmissions are expensive, and the goal of the penalties is to encourage providers to take measures that reduce repeat hospitalizations—for instance, providing patients with clearer discharge instructions and coordinating with primary-care physicians.
Hospitals are graded on a curve and dunned if their 30-day readmission rate exceeds the national average. Hospitals can thus be penalized even if they reduce readmissions. The penalties, which are assessed as a share of hospitals’ Medicare payments, have been applied to an increasing number of medical conditions including knee and hip replacements.
Liberals have touted data showing that readmissions have fallen since the penalties took effect in 2013, but the JAMA researchers examined whether quality of care has improved as a result. Their observational study examined 115,245 fee-for-service Medicare beneficiaries hospitalized with heart failure across the U.S. in the four years prior to and first two years following implementation of the program.
Researchers found that the 30-day readmission rate (adjusted for patient risk) declined to 18.4% from 20% after the penalties were introduced. Yet the 30-day mortality rate increased to 8.6% from 7.2%—about 5,400 additional deaths per year. Over a one-year period the readmission rate fell by 0.9 percentage points while the mortality rate rose by five. In other words, while fewer patients were being readmitted, many more were dying.
The researchers hypothesize that the penalties might “incentivize hospitals to ‘game’ the system, using strategies such as delaying admissions beyond day 30, increasing observation stays, or shifting inpatient-type care to emergency departments.” These tricks may end up hurting patients.
Hospitals with above-average readmissions are also more likely to care for low-income patients and deal with complicated medical cases. They are usually financially strapped due to low Medicaid reimbursements, and the ObamaCare penalties may make it even harder to deliver quality care.
ObamaCare effectively enrolled Medicare patients and hospitals without their consent in a mandatory policy experiment—you’ll be better off, trust us—but then neglected to evaluate the adverse effects. A drug trial with the same results would have been shut down long ago.
The JAMA researchers conclude that, “like drugs and devices, public health policies should be tested in a rigorous fashion—most preferably in randomized trials—before their widespread adoption.” Sounds like good advice, but not the sort that ObamaCare architects and masters of the economic-planning universe like Peter Orszag and Jonathan Gruber are inclined to take.
The Trump Administration is seeking to redesign some of ObamaCare’s payment programs, including making policy experiments voluntary for providers. This has caused a fury among those on the left who believe that government coercion is the cure for all health-care maladies. Testing incentives on a small scale could prevent untold economic harm and deaths. |
THE GIFT OF MUSIC: AN INTERGENERATIONAL CAMPUS-COMMUNITY PARTNERSHIP Abstract Intergenerational programs bring individuals together across a continuum of age to share experiences. This study was designed to engage university music students with residents at a senior living community. University music student groups performed monthly at a local senior living community and completed pre- and post- performance evaluations to assess performance expectations, level of interest in the performance, perception of factors determining performance success, and perception of performance importance for the senior residents. A total of 24 students participated in one of three musical ensembles (Choir=11; Flutes=5; Steel Drums=8) during the months of February, March, and April 2022. Across all three ensembles, 50% of the students had never performed for residents at a senior living community. On a scale of 15, with 1 being Not at all and 5 being Very much, 21 students indicated a 4 or 5 as to the importance of the performance for the residents whereas 15 students indicated a 4 or 5 in response to the importance of the performance for themselves. Most students reported looking forward to the performance, and following the performance indicated it had been a success. Factors identified as determining performance success included comments such as: How much the audience enjoyed themselves! and Performers and audience enjoyed the performance. We would love to come back!. Our findings suggest that performing live music in an intergenerational campus-community setting is beneficial not only for students but also for senior living facility residents. dog walkers experienced less deterioration than non-walkers (TMT A: p=0.156; B: p=0.001; difference: p < 0.001). This study provides the first longitudinal evidence that PO may contribute to maintaining EF among community-dwelling generally healthy older adults as they age. 
EXAMINING HUMAN-ANIMAL INTERACTIONS AND THEIR EFFECT ON FRAILTY IN LATER LIFE: A SCOPING REVIEW Ashley Taeckens-Seabaugh, Mary Corcoran, and Kevin Morris, University of Denver, Denver, Colorado, United States Research suggests that human-animal interactions (HAIs) can improve the health and well-being of humans throughout their lifespan. While HAIs may facilitate healthy aging broadly, scant research has focused on HAIs as an intervention for adults aged 50 and older as it pertains to a comprehensive perspective of frailty. Moreover, scholarly literature lacks a consistent frailty definition, resulting in a lack of cohesion when evaluating the effectiveness of frailty interventions. This scoping review research proposes a comprehensive frailty definition and explores what is known about HAI interventions available to older adults as they relate to frailty statuses. Despite broad inclusion criteria, only four articles were relevant to this literature review, confirming the scarcity of relevant completed research thus far. Thematic analysis of reported results includes dog ownership as a protective factor regarding frailty statuses, the interconnected health effects of pet ownership, and meaning and purpose implications. Future interdisciplinary research should consider HAIs outside of pet ownership as frailty interventions for older adults, be mindful of population differences as they relate to intervention effectiveness, and work towards a universal, comprehensive definition of frailty that will aid in evaluating the frailty intervention effectiveness. Intergenerational programs bring individuals together across a continuum of age to share experiences. This study was designed to engage university music students with residents at a senior living community. 
University music student groups performed monthly at a local senior living community and completed pre-and post-performance evaluations to assess performance expectations, level of interest in the performance, perception of factors determining performance success, and perception of performance importance for the senior residents. A total of 24 students participated in one of three musical ensembles (Choir=11; Flutes=5; Steel Drums=8) during the months of February, March, and April 2022. Across all three ensembles, 50% of the students had never performed for residents at a senior living community. On a scale of 1-5, with 1 being "Not at all" and 5 being "Very much", 21 students indicated a "4" or "5" as to the importance of the performance for the residents whereas 15 students indicated a "4" or "5" in response to the importance of the performance for themselves. Most students reported looking forward to the performance, and following the performance indicated it had been a success. Factors Innovation in Aging, 2022, Vol. 6, No. S1 identified as determining performance success included comments such as: "How much the audience enjoyed themselves!" and "Performers and audience enjoyed the performance. We would love to come back!". Our findings suggest that performing live music in an intergenerational campuscommunity setting is beneficial not only for students but also for senior living facility residents. (Taylor & Rupp, 2004, ). This research seeks to better understand how drag expression is integrated with one's persona and how it interfaces with dragism, coping, and if drag expression can be used as a tool to foster resilience. Using a semi-structured interview protocol and thematic analysis, many themes emerged including the risks and rewards associated with drag expression, the relationship between aging and being a drag performer, and the shift in traditional versus 21st century drag. 
Lastly, we highlight the implications for gender and LGBTQIA+ theory and plans for future research. THEMES FROM CONVERSATIONS WITH MEDICAL TRAINEES ON LGBTQ OLDER ADULTS Shobhana Sandhu, Mackenzi Kim, Lynn Wilson, Nyann Biery, and Kimberly Infante, Lehigh Valley Health Network, Allentown, Pennsylvania, United States Approximately 6-12% of the US population 65 or older self-identifies as LGBTQ. This population faces immense barriers when accessing care, including the bias from healthcare professionals. Efforts to combat this bias through formal education are minimal. Using a mixed methods study with one-group pretest-posttest design and focus groups, medical learners were included in sessions involving a showing of Gen Silent and a post-viewing discussion. Themes from discussion were extracted by two independent reviewers. Medical learners (Nf15) included residents and faculty of psychiatric and emergency medicine at Lehigh Valley Health Network. Themes included: recognition of the social isolation faced by LGBTQ older adults, recognition of barriers to care including stigma and bias, challenges supporting patients and enabling patients' openness, a need for a community resource repository, opportunities for EMR optimization, and physicians as advocates. These results highlight the need for additional training for medical trainees as well as the efficacy of using a tool like Gen Silent to accomplish this. THE IMPACTS OF DEPRESSION AND SUICIDE ATTEMPTS AMONG OLDER MEN WHO HAVE SEX WITH MEN Alex Siu Wing Chan, 1 and Elsie Yan 2, 1. Hong Kong Polytechnic University,Hong Kong,Hong Kong,2. The Hong Kong Polytechnic University,Hong Kong,Hong Kong Suicidality among older adults has attracted much attention due to their vulnerability. Older men who have sex with men (OMSM) have rarely been studied psychologically. Study examines factors that affect the mental health of OMSM, including depression, suicidal tendency, and suicide likelihood. 
OMSM in the United States are analyzed using descriptive statistics for correlations between depression and suicidal tendency. A literature review helped us select scales based on the regression model we constructed. Control variables were assessed for validity and relevance. A dependent variable was depression, and a dependent variable was suicidal tendencies. Depression and suicidal tendency scores significantly differed between men who have sex with men and the general population (t = 67.084,58.193, P < 0.01). Suicidal tendencies and depression are significantly higher among homosexuals than among general groups. The regression analysis shows older men who have sex with men are more likely to suffer from depression and suicide (P < 0.01). Depression and suicide rates in OMSM are higher than those in the general population. The level of depression, in the intermediary test, mediates both the effect of OMSM on individual suicidal tendency and individual suicidal behavior (P < 0.01). Suicidal tendencies in OMSM can be reduced through depression intervention. NAVIGATING DEMENTIA DURING COVID-19: THE EXPERIENCES OF GAY AND LESBIAN OLDER ADULTS Laura Girling, 1 and Mike Splaine 2, 1. Towson University, Towson,Maryland,United States,2. Splaine Consulting,Columbia,Maryland,United States By 2030, it is estimated that 30 million individuals worldwide will have Alzheimer's disease or related-dementias (hereafter dementia). Described as a "modern epidemic of later life," dementia research has begun to reflect the diversity of our aging society with greater attention to minority populations. Nonetheless, some marginalized dementia-affected populations remain understudied. Estimates suggest more than 1 million lesbian, gay, bisexual, and transgender older adults will have dementia by 2030. Despite sizeable predictions, dementiaaffected gay and lesbian populations remain critically understudied particularly in relation to COVID-19. 
To date, there is scarce literature focusing on how community-dwelling gay and lesbian adults with dementia navigate the management of their condition during the COVID-19 pandemic. In order to identify how community-dwelling gay and lesbian adults manage their dementia during the COVID-19 pandemic, data were combined from two interview-based studies. Content analysis was conducted on the interview narrative of the subset of individuals with dementia identifying as gay or lesbian and their study partner(n=18). Thematic findings include: 1.triple |
Comics as Art This chapter argues that the history of physical, juxtaposed displays of comics and art in museum and gallery settings embodies curatorial containment strategies that perpetually fail. To pursue this claim is at the same time to assert that comics' entrance into the art world, rather than a function of a postmodern turn and its contemporary reckoning, has been ongoing since the 1890s. To sketch this 130-year history, the chapter analyzes three key exhibitions in which museums and galleries have been unable to either fully disavow or fully integrate the connections between comics and art, comics as art, in the past century: the 1913 Armory Show, the 1990 exhibition High and Low: Modern Art and Popular Culture at MoMA, and the 2013 exhibition of Ad Reinhardt's comics alongside his black cruciform paintings at the David Zwirner Gallery.
//! Write the comment strings, if any.
/*!
 Emits every comment attached to \c value, one per output line, but only
 when the writer was configured with the wxJSONWRITER_WRITE_COMMENTS style
 flag. Each comment is preceded either by the current indentation (when
 \c indent is true) or by a single TAB, and is guaranteed to end with a
 newline. Returns the last character written, or 0 when nothing was
 emitted.
*/
int
wxJSONWriter::WriteComment(wxOutputStream &os, const wxJSONValue &value, bool indent) {
    int lastChar = 0;
    // comments are only written when explicitly enabled in the style flags
    if ((m_style & wxJSONWRITER_WRITE_COMMENTS) == 0) {
        return lastChar;
    }
    const wxArrayString comments = value.GetCommentArray();
    const int numComments = comments.GetCount();
    for (int idx = 0; idx < numComments; idx++) {
        // align with the current value: full indentation or a TAB separator
        if (indent) {
            WriteIndent(os);
        } else {
            os.PutC('\t');
        }
        WriteString(os, comments[idx]);
        lastChar = comments[idx].Last();
        // make sure every comment line is newline-terminated
        if (lastChar != '\n') {
            os.PutC('\n');
            lastChar = '\n';
        }
    }
    return lastChar;
}
<reponame>jmchs-robotics/strongback<filename>src/main/java/org/strongback/Strongback.java
/*
* Strongback
* Copyright 2015, Strongback and individual contributors by the @authors tag.
* See the COPYRIGHT.txt in the distribution for a full listing of individual
* contributors.
*
* Licensed under the MIT License; you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://opensource.org/licenses/MIT
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.strongback;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.LockSupport;
import java.util.function.BooleanSupplier;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.LongConsumer;
import java.util.function.Supplier;
import org.strongback.AsyncEventRecorder.EventWriter;
import org.strongback.Logger.Level;
import org.strongback.annotation.NotImplemented;
import org.strongback.annotation.ThreadSafe;
import org.strongback.command.Command;
import org.strongback.command.CommandState;
import org.strongback.command.Scheduler;
import org.strongback.command.Scheduler.CommandListener;
import org.strongback.components.Clock;
import org.strongback.components.Counter;
import org.strongback.components.Switch;
import org.strongback.util.Metronome;
/**
* Access point for a number of the higher-level Strongback functions. This class can be used within robot code or within unit
* tests.
*
* <h2>Configuration</h2>
* <p>
* Strongback will by default use the system logger, FPGA time (if available), and an executor that operates on a 5 millisecond
* execution period. If these defaults are not acceptable, then the Strongback library needs to be configured programmatically
* before you use it.
* <p>
* To configure Strongback, do the following once in the initialization of your robot (perhaps very early in the
* {@link edu.wpi.first.wpilibj.IterativeRobot#robotInit()} method):
* <ol>
* <li>call the {@link #configure()} method to obtain the {@link Configurator} instance,</li>
* <li>call any combination of the "use" or "set" methods on the {@link Configurator} instance,</li>
* <li>call the {@link Configurator#initialize() initialize()} method on the {@link Configurator} instance.</li>
* </ol>
* After that, the configuration should not be adjusted again, and any of the other Strongback methods can be used.
* <p>
* For example, the following code configures Strongback to use what happen to be the default logger, time system, 5ms executor
* (that uses busy-wait loops rather than {@link Thread#sleep(long)}), and automatically recording data and events to files on
* the RoboRIO:
*
* <pre>
* Strongback.configure()
* .useSystemLogger(Logger.Level.INFO)
* .useFpgaTime()
* .useExecutionPeriod(5, TimeUnit.MILLISECONDS)
* .useExecutionWaitMode(WaitMode.BUSY)
* .initialize();
* // Strongback is ready to use ...
* </pre>
*
* @author <NAME>
*/
@ThreadSafe
public final class Strongback {
public static final class Configurator {
public static enum TimerMode {
/**
* The thread uses a busy loop to prevent context switching to accurately wait for the prescribed amount of time.
* This is a very accurate approach, but the thread remains busy the entire time. See
* {@link Metronome#busy(long, TimeUnit, Clock)} for details.
*/
BUSY, /**
* The thread uses {@link Thread#sleep(long)} to wait for the prescribed amount of time. This may not be very
* accurate, but it is efficient since the thread will pause so that other work can be done by other threads.
* See {@link Metronome#sleeper(long, TimeUnit, Clock)} for details.
*/
SLEEP, /**
* The thread uses {@link LockSupport#parkNanos(long)} to wait for the prescribed amount of time. The
* accuracy of this approach will depend a great deal upon the hardware and operating system. See
* {@link Metronome#parker(long, TimeUnit, Clock)} for details.
*/
PARK;
}
        // Factory for per-context loggers; defaults to a System.out logger at INFO.
        private Supplier<Function<String, Logger>> loggersSupplier = () -> str -> new SystemLogger().enable(Level.INFO);
        // Time source; defaults to the FPGA clock with system-time fallback.
        private Supplier<Clock> timeSystemSupplier = Clock::fpgaOrSystem;
        // How the executor thread waits between iterations.
        private TimerMode executionWaitMode = TimerMode.BUSY;
        // Executor period. NOTE(review): this default is 20 ms, but the class
        // javadoc and useExecutionPeriod docs both say 5 ms — confirm which is
        // intended.
        private long executionPeriodInNanos = TimeUnit.MILLISECONDS.toNanos(20);
        private volatile boolean initialized = false;
        // Filename prefixes for the data and event recorder output files.
        private String dataRecorderFilenameRoot = "strongback";
        private String eventRecorderFilenameRoot = "strongback";
        private int estimatedRecordDurationInSeconds = 180; // 3 minutes by default
        private long eventRecordFileSizeInBytes = 1024 * 1024 * 2; // 2 MB by default
        // Whether command state transitions are written to the event recorder.
        private boolean recordCommandStateChanges = true;
        // Factories for the data/event writers; null disables the recorder.
        private Function<Iterable<DataRecorderChannel>, DataWriter> dataWriterFactory = this::createFileDataWriter;
        private Supplier<EventWriter> eventWriterFactory = this::createFileEventWriter;
        // No handler installed by default for excessive executor delays.
        private LongConsumer excessiveExecutorDelayHandler = null;
        // Generates sequential data-file names: "<root>-data-<n>.dat".
        private Supplier<String> dataRecorderFilenameGenerator = new Supplier<String>() {
            private Counter counter = Counter.unlimited(1);
            @Override
            public String get() {
                return dataRecorderFilenameRoot + "-data-" + counter.get() + ".dat";
            }
        };
        // Generates sequential event-file names: "<root>-event-<n>.dat".
        private Supplier<String> eventRecorderFilenameGenerator = new Supplier<String>() {
            private Counter counter = Counter.unlimited(1);
            @Override
            public String get() {
                return eventRecorderFilenameRoot + "-event-" + counter.get() + ".dat";
            }
        };
protected DataWriter createFileDataWriter(Iterable<DataRecorderChannel> channels) {
int writesPerSecond = (int) (((double) TimeUnit.SECONDS.toNanos(1)) / executionPeriodInNanos);
return new FileDataWriter(channels, dataRecorderFilenameGenerator, writesPerSecond,
estimatedRecordDurationInSeconds);
}
        // Default factory for the file-backed event writer; event files roll
        // over once they reach the configured size cap.
        protected EventWriter createFileEventWriter() {
            return new FileEventWriter(eventRecorderFilenameGenerator, eventRecordFileSizeInBytes);
        }
        // Placeholder for a network-table data writer; not implemented yet and
        // intentionally returns null (recordDataToNetworkTables() throws before
        // this could ever be used).
        protected DataWriter createNetworkDataWriter(List<DataRecorderChannel> channels) {
            return null;
        }
/**
* Log messages to {@link SystemLogger System.out} at the specified level
*
* @param level the global logging level; may not be null
* @return this configurator so that methods can be chained together; never null
*/
public Configurator useSystemLogger(Logger.Level level) {
if (level == null) throw new IllegalArgumentException("The system logging level may not be null");
loggersSupplier = () -> (context) -> new SystemLogger().enable(level);
return this;
}
/**
* Log messages to custom {@link Logger} implementations based upon the supplied function that maps the string contexts
* to custom loggers.
*
* @param loggers the custom function that produces a logger for a context; may not be null
* @return this configurator so that methods can be chained together; never null
*/
public Configurator useCustomLogger(Function<String, Logger> loggers) {
if (loggers == null) throw new IllegalArgumentException("The custom loggers function may not be null");
loggersSupplier = () -> loggers;
return this;
}
/**
* Determine the time using the RoboRIO's FPGA's hardware if available, or the system time if FPGA hardware is not
* available.
*
* @return this configurator so that methods can be chained together; never null
*/
public Configurator useFpgaTime() {
timeSystemSupplier = Clock::fpgaOrSystem;
return this;
}
/**
* Determine the time using the JRE's {@link Clock#system() time system}.
*
* @return this configurator so that methods can be chained together; never null
*/
public Configurator useSystemTime() {
timeSystemSupplier = Clock::system;
return this;
}
/**
* Determine the time using a custom {@link Clock} implementation.
*
* @param clock the custom time system; may not be null
* @return this configurator so that methods can be chained together; never null
*/
public Configurator useCustomTime(Clock clock) {
if (clock == null) throw new IllegalArgumentException("The custom time system may not be null");
timeSystemSupplier = () -> clock;
return this;
}
/**
* Turn off the data recorder so that it does not record anything.
*
* @return this configurator so that methods can be chained together; never null
*/
public Configurator recordNoData() {
dataWriterFactory = null;
return this;
}
/**
* Record data to local files that begin with the given prefix.
*
* @param filenamePrefix the prefix for filenames; may not be null
* @return this configurator so that methods can be chained together; never null
*/
public Configurator recordDataToFile(String filenamePrefix) {
if (filenamePrefix == null) throw new IllegalArgumentException("The filename prefix may not be null");
dataRecorderFilenameRoot = filenamePrefix;
dataWriterFactory = this::createFileDataWriter;
return this;
}
        /**
         * Record data to the network tables.
         * <p>
         * Not implemented yet: calling this always throws. The commented-out
         * lines show the intended wiring once a network data writer exists.
         *
         * @return this configurator so that methods can be chained together; never null
         * @throws UnsupportedOperationException always
         */
        @NotImplemented
        public Configurator recordDataToNetworkTables() {
            throw new UnsupportedOperationException("Network data writer is not yet implemented");
            // dataWriterFactory = this::createNetworkDataWriter;
            // return this;
        }
/**
* Record data to a custom {@link DataWriter} by supplying the factory that will create the data writer.
*
* @param customWriterFactory the factory for the {@link DataWriter} instance; may not be null
* @return this configurator so that methods can be chained together; never null
*/
public Configurator recordDataTo(Function<Iterable<DataRecorderChannel>, DataWriter> customWriterFactory) {
if (customWriterFactory == null) throw new IllegalArgumentException("The custom writer factory cannot be null");
dataWriterFactory = customWriterFactory;
return this;
}
/**
* Set the estimated number of seconds that the data recorder will capture. This is used to estimate by the data
* recorder to optimize any resources it uses.
*
* @param numberOfSeconds the estimated number of seconds of recorded data; must be non-negative
* @return this configurator so that methods can be chained together; never null
*/
public Configurator recordDuration(int numberOfSeconds) {
if (numberOfSeconds < 0) throw new IllegalArgumentException("The number of seconds may not be negative");
estimatedRecordDurationInSeconds = numberOfSeconds;
return this;
}
/**
* Record events to local files that begin with the given prefix.
*
* @param filenamePrefix the prefix for filenames; may not be null
* @param sizeInBytes the size of the files in bytes; must be at least 1024 bytes
* @return this configurator so that methods can be chained together; never null
*/
public Configurator recordEventsToFile(String filenamePrefix, long sizeInBytes) {
if (filenamePrefix == null) throw new IllegalArgumentException("The filename prefix may not be null");
if (sizeInBytes < 1024) throw new IllegalArgumentException("The event file size must be at least 1024 bytes");
eventRecorderFilenameRoot = filenamePrefix;
eventRecordFileSizeInBytes = sizeInBytes;
eventWriterFactory = this::createFileEventWriter;
return this;
}
/**
* Turn off the event recorder so that it does not record anything.
*
* @return this configurator so that methods can be chained together; never null
*/
public Configurator recordNoEvents() {
eventWriterFactory = null;
return this;
}
/**
* Automatically record all command state transitions to the event recorder.
*
* @return this configurator so that methods can be chained together; never null
*/
public Configurator recordCommands() {
recordCommandStateChanges = true;
return this;
}
/**
 * Do not record any command state transitions to the event recorder.
 *
 * @return this configurator so that methods can be chained together; never null
 */
public Configurator recordNoCommands() {
    // Bug fix: this previously assigned true, which enabled command recording and made this
    // method behave identically to recordCommands(). Disabling must assign false.
    recordCommandStateChanges = false;
    return this;
}
/**
 * Use the specified wait mode for Strongback's {@link Strongback#executor() executor}. The wait
 * mode determines whether the executor's thread busy-loops, sleeps, or parks until the
 * {@link #useExecutionPeriod(long, TimeUnit) period} has elapsed.
 *
 * @param mode the desired wait mode; may not be null
 * @return this configurator for method chaining; never null
 * @see #useExecutionPeriod(long, TimeUnit)
 */
public Configurator useExecutionTimerMode(TimerMode mode) {
    if (mode == null) {
        throw new IllegalArgumentException("The execution timer mode may not be null");
    }
    executionWaitMode = mode;
    return this;
}
/**
 * Use the specified execution rate for Strongback's {@link Strongback#executor() executor}.
 * The default execution rate is 5 milliseconds.
 * <p>
 * The configured clock affects the achievable precision: the {@link #useFpgaTime() FPGA clock}
 * typically supports intervals down to a few milliseconds, whereas the {@link #useSystemTime()
 * system clock} may only support 10-15 millisecond intervals. Intervals shorter than 1
 * millisecond are therefore rejected.
 *
 * @param interval the interval for calling all registered {@link Executable}s; must be positive
 * @param unit the time unit for the interval; may not be null
 * @return this configurator for method chaining; never null
 * @see #useExecutionTimerMode(TimerMode)
 * @throws IllegalArgumentException if the interval is not positive or is shorter than 1 millisecond
 */
public Configurator useExecutionPeriod(long interval, TimeUnit unit) {
    if (interval <= 0) throw new IllegalArgumentException("The execution interval must be positive");
    if (unit == null) throw new IllegalArgumentException("The time unit may not be null");
    long intervalInNanos = unit.toNanos(interval);
    if (intervalInNanos < TimeUnit.MILLISECONDS.toNanos(1)) {
        throw new IllegalArgumentException("The interval must be at least 1 millisecond");
    }
    executionPeriodInNanos = intervalInNanos;
    return this;
}
/**
 * Register a handler that is notified whenever the executor takes longer than the
 * {@link #useExecutionPeriod(long, TimeUnit) execution period} to complete an interval.
 *
 * @param handler the receiver for notifications of excessive execution times
 * @return this configurator for method chaining; never null
 */
public Configurator reportExcessiveExecutionPeriods(LongConsumer handler) {
    excessiveExecutorDelayHandler = handler;
    return this;
}
/**
 * When the supplied condition is {@code true}, run the supplied function.
 *
 * @param condition whether the supplied function should be called
 * @param configure the function that performs additional configuration; ignored if null
 * @return this configurator for method chaining; never null
 */
public Configurator when( boolean condition, Runnable configure ) {
    // Delegate to the BooleanSupplier overload so the null-handling lives in one place.
    return when(() -> condition, configure);
}
/**
 * When the supplied condition evaluates to {@code true}, run the supplied function.
 * If either argument is null, this method does nothing.
 *
 * @param condition the function that determines whether {@code configure} should run
 * @param configure the function that performs additional configuration
 * @return this configurator for method chaining; never null
 */
public Configurator when( BooleanSupplier condition, Runnable configure ) {
    if (condition == null || configure == null) {
        return this;
    }
    if (condition.getAsBoolean()) {
        configure.run();
    }
    return this;
}
/**
 * When the supplied condition is {@code true}, call the supplied function with this Configurator.
 *
 * @param condition whether the supplied function should be called
 * @param configure the function that performs additional configuration; ignored if null
 * @return this configurator for method chaining; never null
 */
public Configurator when( boolean condition, Consumer<Configurator> configure ) {
    // Delegate to the BooleanSupplier overload so the null-handling lives in one place.
    return when(() -> condition, configure);
}
/**
 * When the supplied condition evaluates to {@code true}, call the supplied function with this
 * Configurator. If either argument is null, this method does nothing.
 *
 * @param condition the function that determines whether {@code configure} should run
 * @param configure the function that performs additional configuration
 * @return this configurator for method chaining; never null
 */
public Configurator when( BooleanSupplier condition, Consumer<Configurator> configure ) {
    if (condition == null || configure == null) {
        return this;
    }
    if (condition.getAsBoolean()) {
        configure.accept(this);
    }
    return this;
}
/**
 * Complete the Strongback configuration and initialize Strongback so that it can be used.
 * <p>
 * Calling this more than once logs a warning, but still replaces the Strongback instance;
 * the previous instance is passed along so its registered components can be carried over.
 */
public synchronized void initialize() {
    if (initialized) {
        // Use the configured logger factory directly since INSTANCE may be mid-replacement.
        loggersSupplier.get()
        .apply("")
        .warn("Strongback has already been initialized. Make sure you configure and initialize Strongback only once");
    }
    initialized = true;
    // The previous INSTANCE is handed to the constructor so running state and registered
    // executables/channels survive re-initialization.
    INSTANCE = new Strongback(this, Strongback.INSTANCE);
}
}
/** The single configurator returned by {@link #configure()}; mutated until {@link Configurator#initialize()} runs. */
private static final Configurator CONFIG = new Configurator();
/** The current Strongback instance; volatile because it is replaced whenever {@link Configurator#initialize()} runs. */
private static volatile Strongback INSTANCE = new Strongback(CONFIG, null);
/**
 * Obtain the Strongback library configurator. Configuration changes take effect only after
 * the {@link Configurator#initialize()} method is called.
 *
 * @return the configurator; never null
 */
public static Configurator configure() {
    return CONFIG;
}
/**
 * Start the Strongback functions: the {@link #executor() executor}, the {@link #submit(Command)
 * command scheduler}, and the {@link #dataRecorder() data recorder}. Does nothing if Strongback
 * is already started.
 * <p>
 * Typically called from {@code IterativeRobot.autonomousInit()} to prepare for autonomous
 * commands and to begin recording data and events.
 *
 * @see #restart()
 */
public static void start() {
    INSTANCE.doStart();
}
/**
 * Ensure that Strongback is {@link #start() started} and, if it was already running, kill all
 * currently-running commands. Equivalent to calling both {@code #start()} and
 * {@code #killAllCommands()}, but slightly more efficient.
 * <p>
 * Typically called from {@code IterativeRobot.teleopInit()} to ensure Strongback is running and
 * to cancel any commands left over from autonomous mode.
 *
 * @see #start
 * @see #killAllCommands()
 */
public static void restart() {
    INSTANCE.doRestart();
}
/**
 * Stop all currently-scheduled activity and flush all recorders. Typically called when the robot
 * becomes disabled; if the robot re-enables, Strongback continues to work as before.
 */
public static void disable() {
    INSTANCE.killCommandsAndFlush();
}
/**
 * Shut down Strongback completely: stop the executor thread, kill all scheduled commands, and
 * stop the data recorder. Strongback must be {@link #start() started} again before it will do
 * any further work.
 */
public static void shutdown() {
    INSTANCE.doShutdown();
}
/**
 * Get Strongback's automatically-configured {@link Executor}, which runs all registered
 * {@link Executable}s repeatedly on one dedicated thread at a precise interval. Sharing a single
 * thread is more efficient than one thread per {@link Executor} instance.
 * <p>
 * The {@link #dataRecorder() data recorder}, {@link #switchReactor() switch reactor}, and
 * {@link #submit(Command) internal scheduler} are already registered with this executor.
 * <p>
 * Take care not to overload it: all registered work must complete within the
 * {@link Configurator#useExecutionPeriod(long, TimeUnit) configured execution interval}, or the
 * executor may fall behind.
 *
 * @return Strongback's executor; never null
 * @see Configurator#useExecutionPeriod(long, TimeUnit)
 * @see Configurator#useExecutionTimerMode(org.strongback.Strongback.Configurator.TimerMode)
 */
public static Executor executor() {
    return INSTANCE.executables;
}
/**
 * Get Strongback's global {@link Logger} implementation with an empty context.
 *
 * @return Strongback's logger instance; never null
 * @see Configurator#useSystemLogger(org.strongback.Logger.Level)
 * @see Configurator#useCustomLogger(Function)
 */
public static Logger logger() {
    return logger("");
}
/**
 * Get Strongback's global {@link Logger} implementation for the given context.
 *
 * @param context the context of the logger
 * @return Strongback's logger instance; never null
 * @see Configurator#useSystemLogger(org.strongback.Logger.Level)
 * @see Configurator#useCustomLogger(Function)
 */
public static Logger logger(String context) {
    return INSTANCE.loggers.apply(context);
}
/**
 * Get Strongback's global {@link Logger} implementation, using the class name as the context.
 *
 * @param context the class whose name becomes the logger context
 * @return Strongback's logger instance; never null
 * @see Configurator#useSystemLogger(org.strongback.Logger.Level)
 * @see Configurator#useCustomLogger(Function)
 */
public static Logger logger(Class<?> context) {
    // Delegate to the String overload; same lookup, one code path.
    return logger(context.getName());
}
/**
 * Get Strongback's {@link Clock time system} implementation.
 *
 * @return Strongback's time system instance; never null
 * @see Configurator#useFpgaTime()
 * @see Configurator#useSystemTime()
 * @see Configurator#useCustomTime(Clock)
 */
public static Clock timeSystem() {
    return INSTANCE.clock;
}
/**
 * Submit a {@link Command} to be executed by Strongback's internal scheduler.
 * Null commands are silently ignored.
 *
 * @param command the command to be submitted
 * @see Configurator#useExecutionPeriod(long, TimeUnit)
 * @see Configurator#useExecutionTimerMode(org.strongback.Strongback.Configurator.TimerMode)
 */
public static void submit(Command command) {
    if (command == null) {
        return;
    }
    INSTANCE.scheduler.submit(command);
}
/**
 * Submit to Strongback's internal scheduler a {@link Command} that runs the supplied function
 * once and completes immediately.
 *
 * @param executeFunction the function to be called during execution; may not be null
 */
public static void submit(Runnable executeFunction) {
    submit(Command.create(executeFunction));
}
/**
 * Submit to Strongback's internal scheduler a {@link Command} that runs the first function once,
 * waits the prescribed amount of time, and then calls the second function.
 *
 * @param first the first function to be called; may not be null
 * @param delayInSeconds the delay in seconds after the first function completes; must be positive
 * @param second the second function to be called after the delay; may be null if not needed
 */
public static void submit(Runnable first, double delayInSeconds, Runnable second) {
    submit(Command.create(delayInSeconds, first, second));
}
/**
 * Submit to Strongback's internal scheduler a {@link Command} that repeatedly runs the supplied
 * function until it returns <code>false</code> or the prescribed maximum time has passed,
 * whichever comes first.
 *
 * @param function the function called at least once; return <code>true</code> to be called again;
 *        may not be null
 * @param maxDurationInSeconds the maximum amount of time the function may be repeatedly called;
 *        must be positive
 */
public static void submit(BooleanSupplier function, double maxDurationInSeconds) {
    submit(Command.create(maxDurationInSeconds, function));
}
/**
 * Submit to Strongback's internal scheduler a {@link Command} that repeatedly runs the first
 * function until it returns <code>false</code> or the prescribed maximum time has passed, and
 * then calls the second function.
 *
 * @param first the function called at least once; return <code>true</code> to be called again;
 *        may not be null
 * @param maxDurationInSeconds the maximum amount of time the first function may be repeatedly
 *        called; must be positive
 * @param second the function to be called afterward; may be null if not needed
 */
public static void submit(BooleanSupplier first, double maxDurationInSeconds, Runnable second) {
    submit(Command.create(maxDurationInSeconds, first, second));
}
/**
 * Kill all currently-running commands.
 */
public static void killAllCommands() {
    INSTANCE.scheduler.killAll();
}
/**
 * Flush all data that has been recorded but not yet written to disk.
 */
public static void flushRecorders() {
    INSTANCE.dataRecorderDriver.flush();
}
/**
 * Get Strongback's {@link SwitchReactor}, which calls registered functions when {@link Switch
 * switches} change state or while they remain in a specific state. It is registered with the
 * {@link #executor() executor}, which periodically polls the registered switches and invokes the
 * appropriate functions based upon the current and previous states.
 * <p>
 * Use this to run custom logic from {@link Switch} states — for example, submitting a command
 * whenever a button is pressed or released, or while it is held. See {@link SwitchReactor}.
 *
 * @return the switch reactor; never null
 * @see SwitchReactor
 * @see Configurator#useExecutionPeriod(long, TimeUnit)
 */
public static SwitchReactor switchReactor() {
    return INSTANCE.switchReactor;
}
/**
 * Get Strongback's {@link DataRecorder}, used to register switches, motors, and other functions
 * that supply recordable data. Recording begins only after Strongback is {@link #start()
 * started}, at which point the data recorder repeatedly polls the registered providers and
 * writes the data to its log. {@link #disable() Disable} Strongback when the robot is disabled
 * to flush unwritten data and stop recording; start it again when the robot is enabled.
 *
 * @return the data recorder; never null
 * @see DataRecorder
 * @see Configurator#recordDataTo(Function)
 * @see Configurator#recordDataToFile(String)
 * @see Configurator#recordDataToNetworkTables()
 * @see Configurator#recordDuration(int)
 * @see Configurator#recordNoData()
 */
public static DataRecorder dataRecorder() {
    return INSTANCE.dataRecorderChannels;
}
/**
 * Get Strongback's {@link EventRecorder}, used to record non-regular events and activities. When
 * Strongback is configured to {@link Configurator#recordCommands() automatically record
 * commands}, all command state changes land in this event log; custom code can also
 * {@link EventRecorder#record(String, String) record events} explicitly.
 *
 * @return the event recorder
 * @see EventRecorder
 * @see Configurator#recordCommands()
 * @see Configurator#recordNoCommands()
 * @see Configurator#recordEventsToFile(String, long)
 * @see Configurator#recordNoEvents()
 */
public static EventRecorder eventRecorder() {
    return INSTANCE.eventRecorder;
}
/**
 * Get the number of times the {@link #executor() executor} failed to finish all work within the
 * {@link Configurator#useExecutionPeriod(long, TimeUnit) configured execution period}.
 *
 * @return the number of excessive delays
 */
public static long excessiveExecutionTimeCounts() {
    return INSTANCE.executorDelayCounter.get();
}
/** Factory producing a named {@link Logger} for a given context string. */
private final Function<String, Logger> loggers;
/** Registry of everything executed on the single executor thread. */
private final Executables executables;
/** Drives the executor thread that runs all registered executables. */
private final ExecutorDriver executorDriver;
/** The configured time system (FPGA, system, or custom). */
private final Clock clock;
/** Controls how the executor thread waits between intervals (park, sleep, or busy). */
private final Metronome metronome;
/** Internal command scheduler; registered with the executor. */
private final Scheduler scheduler;
/** Polls registered switches and invokes callbacks on state changes. */
private final AsyncSwitchReactor switchReactor;
/** The set of registered data channels exposed via dataRecorder(). */
private final DataRecorderChannels dataRecorderChannels;
/** Drives periodic polling and writing of the registered data channels. */
private final DataRecorderDriver dataRecorderDriver;
/** Records non-regular events; a no-op implementation when event recording is disabled. */
private final EventRecorder eventRecorder;
/** Whether this instance has been started (executor and recorder running). */
private final AtomicBoolean started = new AtomicBoolean(false);
/** Counts intervals in which the executor exceeded the configured execution period. */
private final AtomicLong executorDelayCounter = new AtomicLong();
/** Optional user callback invoked when the executor exceeds the execution period. */
private final LongConsumer excessiveExecutionHandler;
/**
 * Build a Strongback instance from the given configuration, optionally carrying over state from
 * a previous instance (as happens when {@link Configurator#initialize()} is called again).
 * The previous instance, if any, is shut down first; its executables registry, switch reactor,
 * and data channels are reused so user registrations survive re-initialization.
 */
private Strongback(Configurator config, Strongback previousInstance) {
    boolean start = false;
    if (previousInstance != null) {
        // Remember whether the previous instance was running so we can resume at the end.
        start = previousInstance.started.get();
        // Terminates all currently-scheduled commands and stops the executor's thread (if running) ...
        previousInstance.doShutdown();
        executables = previousInstance.executables;
        switchReactor = previousInstance.switchReactor;
        // Drop the previous instance's internal components; new ones are registered below.
        executables.unregister(previousInstance.dataRecorderDriver);
        executables.unregister(previousInstance.eventRecorder);
        executables.unregister(previousInstance.scheduler);
        dataRecorderChannels = previousInstance.dataRecorderChannels;
        // NOTE(review): the handler is carried over from the previous instance, so a handler set
        // on the configurator after the first initialize() is ignored — confirm this is intended.
        excessiveExecutionHandler = previousInstance.excessiveExecutionHandler;
    } else {
        executables = new Executables();
        switchReactor = new AsyncSwitchReactor();
        executables.register(switchReactor);
        dataRecorderChannels = new DataRecorderChannels();
        excessiveExecutionHandler = config.excessiveExecutorDelayHandler;
    }
    loggers = config.loggersSupplier.get();
    clock = config.timeSystemSupplier.get();
    // Choose the wait strategy for the executor thread based on the configured mode.
    switch (config.executionWaitMode) {
        case PARK:
            metronome = Metronome.parker(config.executionPeriodInNanos, TimeUnit.NANOSECONDS, clock);
            break;
        case SLEEP:
            metronome = Metronome.sleeper(config.executionPeriodInNanos, TimeUnit.NANOSECONDS, clock);
            break;
        case BUSY:
        default:
            metronome = Metronome.busy(config.executionPeriodInNanos, TimeUnit.NANOSECONDS, clock);
            break;
    }
    // Create a new executor driver ...
    executorDriver = new ExecutorDriver("Strongback Executor", executables, clock, metronome, loggers.apply("executor"),
            monitorDelay(config.executionPeriodInNanos, TimeUnit.NANOSECONDS));
    // Create a new event recorder; a null factory means event recording is disabled ...
    if (config.eventWriterFactory != null) {
        eventRecorder = new AsyncEventRecorder(config.eventWriterFactory.get(), clock);
        executables.register(eventRecorder);
    } else {
        eventRecorder = EventRecorder.noOp();
    }
    // Create a new scheduler that optionally records command state transitions. Note that we ignore everything in
    // the previous instance's scheduler, since all commands would have been terminated (as intended) ...
    CommandListener commandListener = config.recordCommandStateChanges ? this::recordCommand : this::recordNoCommands;
    scheduler = new Scheduler(loggers.apply("scheduler"), commandListener);
    executables.register(scheduler);
    // Create a new data recorder driver ...
    dataRecorderDriver = new DataRecorderDriver(dataRecorderChannels, config.dataWriterFactory);
    executables.register(dataRecorderDriver);
    // Start this if the previous was already started ...
    if (previousInstance != null && start) {
        doStart();
    }
}
/**
 * Build the callback handed to the executor driver, invoked with each cycle's measured delay in
 * milliseconds. When the delay exceeds the configured execution interval, the excessive-delay
 * counter is incremented and either the custom handler or a default error log entry is used.
 *
 * @param executionInterval the configured execution interval
 * @param unit the unit of {@code executionInterval}
 * @return the delay-monitoring callback; never null
 */
private LongConsumer monitorDelay(long executionInterval, TimeUnit unit) {
    long intervalInMs = unit.toMillis(executionInterval);
    return delayInMs -> {
        if (delayInMs > intervalInMs) {
            executorDelayCounter.incrementAndGet();
            if (excessiveExecutionHandler != null) {
                try {
                    excessiveExecutionHandler.accept(delayInMs);
                } catch (Throwable t) {
                    // A misbehaving user handler must never break the executor loop.
                    logger().error(t, "Error with custom handler for excessive execution times");
                }
            } else {
                logger().error("Unable to execute all activities within " + intervalInMs + " milliseconds!");
            }
        }
    };
}
/** Command listener used when command recording is enabled: logs the transition to the event recorder. */
private void recordCommand(Command command, CommandState state) {
    eventRecorder.record(command.getClass().getName(), state.ordinal());
}
/** Intentional no-op command listener used when command state recording is disabled. */
private void recordNoCommands(Command command, CommandState state) {
}
/**
 * Start the data recorder and the executor thread if not already started. The nested
 * try/finally blocks ensure the executor start is attempted even if the recorder start throws.
 */
private void doStart() {
    if (!started.get()) {
        try {
            dataRecorderDriver.start();
        } finally {
            try {
                executorDriver.start();
            } finally {
                // NOTE(review): started is set true even when one of the starts threw — confirm intended.
                started.set(true);
            }
        }
    }
}
/**
 * Ensure this instance is running: if already started, kill all scheduled commands; otherwise
 * start the data recorder and executor exactly as {@link #doStart()} does.
 */
private void doRestart() {
    if (started.get()) {
        // Kill any remaining commands ...
        scheduler.killAll();
    } else {
        try {
            dataRecorderDriver.start();
        } finally {
            try {
                executorDriver.start();
            } finally {
                started.set(true);
            }
        }
    }
}
/**
 * Kill all scheduled commands and flush any recorded-but-unwritten data. Does nothing when this
 * instance is not started. The try/finally ensures the flush happens even if killAll throws.
 */
private void killCommandsAndFlush() {
    if (started.get()) {
        try {
            // Kill any remaining commands ...
            scheduler.killAll();
        } finally {
            // Finally flush the data recorder ...
            dataRecorderDriver.flush();
        }
    }
}
/**
 * Shut down this instance: stop the executor, kill all commands, and stop the data recorder, in
 * that order. Each step is wrapped in a finally block so the later steps run even if an earlier
 * one throws, and {@code started} is always reset to false.
 */
private void doShutdown() {
    try {
        // First stop executing immediately; at this point, no executables will run ...
        executorDriver.stop();
    } finally {
        try {
            // Kill any remaining commands ...
            scheduler.killAll();
        } finally {
            try {
                // Finally flush the data recorder ...
                dataRecorderDriver.stop();
            } finally {
                started.set(false);
            }
        }
    }
}
}
|
Want a different perspective on the crisis in Ukraine? Check out RT.com, the state Russian broadcaster that's tasked with improving Russia's image abroad (AKA: propaganda).
"Welcome to the Democratic People's Republic of Korea," our host beamed. The plane had barely landed, and the propaganda had already started. I was officially part of the first tourist delegation to ever visit North Korea for New Year's.
And the Award Goes to...Monsanto? PLEASE SIGN!
ACTION NEEDED Stop the Killing of Stray Dogs in Romania!
Twenty bulls have been "liberated" from the town of Tordesillas in northern Spain on the eve of a controversial festival in which the whole town hunts down a single animal with spears.
WE HELPED: An appeal has put a hold on the planned, mass execution of thousands of stray dogs in Romania, reported Reuters on Monday. |
package zee.engine.nodes;
import org.junit.Test;
public class MinusNodeTest {
    // NOTE(review): the class name says "MinusNodeTest" but the test constructs a MinNode with
    // the expression "min()" — confirm whether this test targets the intended node type.
    /** Evaluating a min() node that has no child arguments must fail with a RuntimeException. */
    @Test(expected=RuntimeException.class)
    public void testExceptionOnNoChildren() throws Exception {
        MathNode node = new MinNode("min()");
        node.evaluate(null);
    }
}
|
package org.tio.utils.cache;
import java.io.Serializable;
/**
 * Minimal cache abstraction mapping String keys to Serializable values.
 *
 * @author tanyaowu
 * 2017-08-10 11:38:26
 */
public interface ICache {
    /**
     * Maximum local expiration time, in seconds. Capping local lifetimes prevents the local
     * cache from exhausting memory.
     */
    public static int MAX_EXPIRE_IN_LOCAL = 900;
    /**
     * Remove every entry from the cache.
     * @author tanyaowu
     */
    void clear();
    /**
     * Look up the value stored under the given key.
     * @param key the cache key
     * @return the cached value, or null if absent
     * @author tanyaowu
     */
    public Serializable get(String key);
    /**
     * Look up the value stored under the given key, typed as the requested class.
     * @param key the cache key
     * @param clazz the expected value type
     * @return the cached value, or null if absent
     * @author tanyaowu
     */
    public <T> T get(String key, Class<T> clazz);
    /**
     * Enumerate all keys currently in the cache.
     * @return an iterable over the keys
     * @author tanyaowu
     */
    Iterable<String> keys();
    /**
     * Store the given key/value pair in the cache.
     * @param key the cache key
     * @param value the value to cache
     * @author tanyaowu
     */
    public void put(String key, Serializable value);
    // /**
    //  * Update the value stored under the given key.
    //  * @param key
    //  * @param value
    //  * @author tanyaowu
    //  */
    // public void update(String key, Serializable value);
    /**
     * Remove the entry stored under the given key.
     * @param key the cache key
     * @author tanyaowu
     */
    public void remove(String key);
    /**
     * Temporarily cache a placeholder value for the key, used to guard against
     * cache-penetration attacks (repeated lookups of keys that do not exist).
     * @param key the cache key
     * @param value the placeholder value
     */
    public void putTemporary(String key, Serializable value);
    /**
     * How much longer the entry for the key will live.
     * @return time in milliseconds;
     * -2 if the key does not exist;
     * -1 if the key exists but has no associated expiration
     */
    public long ttl(String key);
}
|
Co-presence of bisphosphonate-related osteonecrosis of the jaw and oral squamous cell carcinoma in a patient with postmenopausal osteoporosis: a role for chronic traumatism? Osteoporosis is an important global issue, particularly in postmenopausal women, predisposing them to an increased risk of fracture. Menopause is indeed associated with a decrease of estrogen -mediated inhibition of bone remodeling and of bone mineral density (BMD).1 Menopause is also associated with an increase (in the burden) of coronary artery disease, diabetes mellitus, depression, cancer. Medications for these morbidities (e.g. selective serotonin reuptake inhibitors, proton pump inhibitors, thiazolidinediones, aromatase inhibitors) thought to be associated with secondary osteoporosis2 as well. The majority of osteoporosis medications are antiresorptive agents, which reduce the rate of bone remodeling and the bone resorption. These classes of drugs include bisphosphonates and Denosumab.3 Bisphosphonates (BPs) have the capacity to bind stably to the mineral component of the bone tissue and to interact with sites of the cells involved in the physiological processes of bone remodeling and, in particular, with osteoclasts. BPs are classified in Nitrogen-containing Bisphosphonates NBP and non-NPB with different mechanisms of action: the firsts inhibit an enzyme that catalyzes the biosynthesis of sterols (cholesterol) from mevalonate; especially the formation of these groups isoprenyl involves the failed prenylation of several classes of proteins that play a key role in the maintenance of the cell cycle (cell morphology, cell proliferation, signal transduction). The seconds are metabolized inside of osteoclasts in molecules similar to a non-hydrolysable ATP, causing an energy deficit and the immediate death of the cell. 
In general, they are all well tolerated,4 but NBP toxicity to soft tissues has been known; indeed, they inhibit epithelial cell proliferation in vitro by means of inhibition of farnesyl diphosphate synthase, the same enzyme which is the target of bisphosphonates in osteoclasts.5 Background Osteoporosis is an important global issue, particularly in postmenopausal women, predisposing them to an increased risk of fracture. Menopause is indeed associated with a decrease of estrogen -mediated inhibition of bone remodeling and of bone mineral density (BMD). 1 Menopause is also associated with an increase (in the burden) of coronary artery disease, diabetes mellitus, depression, cancer. Medications for these morbidities (e.g. selective serotonin reuptake inhibitors, proton pump inhibitors, thiazolidinediones, aromatase inhibitors) thought to be associated with secondary osteoporosis 2 as well. The majority of osteoporosis medications are antiresorptive agents, which reduce the rate of bone remodeling and the bone resorption. These classes of drugs include bisphosphonates and Denosumab. 3 Bisphosphonates (BPs) have the capacity to bind stably to the mineral component of the bone tissue and to interact with sites of the cells involved in the physiological processes of bone remodeling and, in particular, with osteoclasts. BPs are classified in "Nitrogen-containing Bisphosphonates" NBP and non-NPB with different mechanisms of action: the firsts inhibit an enzyme that catalyzes the biosynthesis of sterols (cholesterol) from mevalonate; especially the formation of these groups isoprenyl involves the failed prenylation of several classes of proteins that play a key role in the maintenance of the cell cycle (cell morphology, cell proliferation, signal transduction). The seconds are metabolized inside of osteoclasts in molecules similar to a non-hydrolysable ATP, causing an energy deficit and the immediate death of the cell. 
In general, they are all well tolerated, 4 but NBP toxicity to soft tissues has been known; indeed, they inhibit epithelial cell proliferation in vitro by means of inhibition of farnesyl diphosphate synthase, the same enzyme which is the target of bisphosphonates in osteoclasts. 5 The most known adverse reaction in the oral cavity of BPs is Bisphosphonate related Osteonecrosis of the Jaw (BRONJ). The first case of BRONJ in an osteoporotic patient was reported in 2004 6 and since then, a large number of case reports have been published both related to cancer and, less frequently, non-cancer patients. The major clinical sign of BRONJ is the presence of exposed necrotic bone in the oral cavity. Still, other clinical signs, considered "minor" (e.g. mucosal fistula, swelling, abscessed formation) also in absence of bone exposed, have been shown to be important for suspected BRONJ cases. 7 Indeed, diagnostic suspect of BRONJ must be supported by radiological investigation: the most frequently used radiological investigations are, on a first level, the panoramic x-ray and the intraoral x-ray and on a second level the computer tomography scan (CT), both spiral CT and cone beam CT (CBCT) (second level investigation). 8 Other common side effects are ulcerations of the upper gastrointestinal tract, predominantly reported for alendronate in its oral administration, 9 especially when patients have not taken BPs as instructed (lying down within 30 minutes of taking the medication). In addition, in 2009, a US Food and Drug Administration suggested an association between bisphosphonates and risk of oesophageal cancer 10 but, to date, the association between oral bisphosphonate use and upper gastrointestinal cancer has not been established. 7 Only few cases have been reported on patients with oral ulcerations as side effects of BPs treatment 11,12 and very few cases (only two) reported an association between oral cancer and bisphosphonate treatment. 
13,14 We report a case of a patient with BRONJ due to an oral alendronate therapy for post-menopausal osteoporosis, diagnosed contemporary to oral squamous cell carcinoma (OSCC). Case report A 84-year-old Caucasian female referred to the Sector of Oral Medicine "V. Margiotta", University Policlinic "P. Giaccone" of Palermo complaining of acute pain of one month's duration on the gingiva, to the right side of the mandible. The patient had a history of osteoporosis on treatment with per os alendronate for 15 years. She also had a history of hypertension treated with calcium channel blocker. On clinical examination there was a mucosal fistula with purulent discharge (Figure 1) and a chronic ulcerative lesion ( Figure 2) on the left border of the tongue. The patient was edentulous, wearing two incongruous total dentures; she did not report any teeth extractions in the previous 8 years. After obtaining an informed consent from the patient, an incisional biopsy was performed on the left border of tongue and a panoramic x-ray and maxillary computer tomography scan (CT) was also prescribed. Histopathology confirmed a diagnosis of OSCC in the non-smoker and non-drinker patient; on the other hand, panoramic x-ray and CT showed the presence of the sclerosis bone with a radio-transparent edge in lower right mandibular hemi-arch. Both clinical and radiological features were confirmed BRONJ, stage IIb (according with SICMF/SIPMO classification). 8 She was treated with antibiotics and chlorhexidine mouth rinse, and referred to III level plastic surgery Sector for staging and treatment of OSCC and BRONJ. Discussion BRONJ is a well-known adverse reaction in cancer and, less, in non-cancer patients treated with antiresorptive agents. The diagnosis must be supported by clinical and radiological investigation. 8 For patients receiving oral BPs therapy to manage osteoporosis, the prevalence of ONJ increases over time from near 0 at baseline to 0.2% after 4 or more years of BP exposure. 
15 Furthermore toxicity to soft tissues has been known; in fact, this molecule inhibits epithelial cell proliferation in vitro by means of inhibition of farnesyl diphosphate synthase, the same enzyme which is the target of bisphosphonates in osteoclasts. 5 Another common side effect found in literature is ulceration of the upper gastrointestinal tract, and to date, the association between oral bisphosphonate use and upper gastrointestinal cancer has been reported but, to date, not yet well-defined. 10 Only few cases have been reported on patients with oral ulcerations as side effects of bisphosphonate treatment 13,14 and very few cases, only two, reported an association between oral cancer and bisphosphonate treatment. 11,12 Single cases do not permit to establish an association between oral BPs and OSCC, but it is suggestive to remind that in presence of local risk factors such as oral chronic traumatism (caused by incongruous dentures)oral BPs may be cause of both, ONJ and OSCC, and the patient must therefore be monitored. Authors contributions Giorgia Capocasale had drafted the manuscript; Vera Panzarella conceived the idea; Antonio Lo Casto had acquired and commented the imaging data, Francesca Toia helped in writing and in critical revision of the manuscript; Giuseppina Campisicarried out the supervision of the study. Conflicts of interest The author declares there is no conflict of interest |
<gh_stars>0
#include <iostream>
#include <yaml-cpp/yaml.h>
#include <jaegertracing/Tracer.h>
#include <string>
// Initialize the process-wide OpenTracing tracer from a Jaeger YAML configuration file.
// `path` must name a readable YAML file; YAML::LoadFile throws on failure.
void setUpTracer(const char* path) {
    auto file = YAML::LoadFile(path);
    auto config = jaegertracing::Config::parse(file);
    auto tracer = jaegertracing::Tracer::make("service", config, jaegertracing::logging::consoleLogger());
    // Publish the Jaeger tracer as the global tracer returned by opentracing::Tracer::Global().
    opentracing::Tracer::InitGlobal(std::static_pointer_cast<opentracing::Tracer>(tracer));
    std::cout << "[info] The tracer was successfully initialized.\n";
}
// Flushes and closes the global tracer previously installed by setUpTracer().
void shutdownTracer() {
    opentracing::Tracer::Global()->Close();
    std::cout << "[info] The tracer was successfully shutdown.\n";
}
// Starts a new root-level span with the given operation name on the
// global tracer and hands ownership of it to the caller.
std::unique_ptr<opentracing::Span> addSpan(const std::string& name) {
    return opentracing::Tracer::Global()->StartSpan(name);
}
// Starts a span named `name` as a child of `parent` on the global tracer.
// The ChildOf reference links the new span to the parent's SpanContext.
// Ownership of the new span is handed to the caller.
std::unique_ptr<opentracing::Span> addChildSpan(const std::unique_ptr<opentracing::Span>& parent, const std::string& name) {
    auto globalTracer = opentracing::Tracer::Global();
    auto parentContext = &parent->context();
    auto child = opentracing::ChildOf(parentContext);
    auto span = globalTracer->StartSpan(name, { child });
    // Fixed: the log previously said "created tracer named ..." although this
    // function creates a span, not a tracer.
    std::cout << "[info] Successfully created span named '" << name << "'.\n";
    return span;
}
// Example workload: records its own execution as a child span of `parent`.
void function2(const std::unique_ptr<opentracing::Span>& parent) {
    const auto scopedSpan = addChildSpan(parent, "function2");
    std::cout << "[info] function2 done.\n";
}
// Example workload: records its own execution as a child span of `parent`.
void function3(const std::unique_ptr<opentracing::Span>& parent) {
    const auto scopedSpan = addChildSpan(parent, "function3");
    std::cout << "[info] function3 done.\n";
}
// Demonstrates a two-level trace: one root span with two child spans.
void function1() {
    const auto rootSpan = addSpan("function1");
    function2(rootSpan);
    function3(rootSpan);
    std::cout << "[info] function1 done.\n";
}
// Demonstrates a wider trace: one root span with four child spans
// (function2 three times, then function3 once).
void function4() {
    const auto rootSpan = addSpan("function4");
    for (int i = 0; i < 3; ++i) {
        function2(rootSpan);
    }
    function3(rootSpan);
    std::cout << "[info] function4 done.\n";
}
// Entry point: expects the path to a Jaeger YAML config file as the first
// command-line argument; exits with 1 when it is missing.
int main(int argc, char** argv) {
    if (argc < 2) {
        std::cout << "[error] Please specify a configuration file.\n";
        return 1;
    }
    setUpTracer(argv[1]);
    function1();
    function4();
    shutdownTracer();
    return 0;
}
|
<reponame>meshy/django-conman
import os
import dj_database_url
# Repository root: two directory levels above this settings module.
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Example/development project — DEBUG must never be enabled in production.
DEBUG = True
# Redacted placeholder; a real deployment must supply its own secret key.
SECRET_KEY = '<PASSWORD>!'
ROOT_URLCONF = 'example.urls'
STATIC_URL = '/static/'
# Database configuration is read from the DATABASE_URL environment variable,
# falling back to a local PostgreSQL database named conman_example.
DATABASES = {'default': dj_database_url.config(
    default='postgres://localhost/conman_example',
)}
# Wrap each HTTP request in a database transaction.
DATABASES['default']['ATOMIC_REQUESTS'] = True
INSTALLED_APPS = (
    'example',
    'conman.routes',
    'conman.redirects',
    'polymorphic',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
)
# NOTE(review): SessionAuthenticationMiddleware was removed in Django 2.0;
# presumably this settings file targets Django < 2.0 — confirm before upgrading.
MIDDLEWARE = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
TEMPLATES = [{
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    'DIRS': [],
    # APP_DIRS is False because an explicit 'loaders' list is given below;
    # Django forbids combining the two.
    'APP_DIRS': False,
    'OPTIONS': {
        'context_processors': [
            'django.template.context_processors.debug',
            'django.template.context_processors.request',
            'django.contrib.auth.context_processors.auth',
            'django.contrib.messages.context_processors.messages',
        ],
        # Template debug mode mirrors the global DEBUG flag.
        'debug': DEBUG,
        'loaders': [
            'django.template.loaders.app_directories.Loader',
        ],
    },
}]
TIME_ZONE = 'Europe/London'
|
// Re-export the ILOPage module's default export as this directory's default.
export { default } from "./ILOPage";
|
<reponame>danielccunha/open-box
// Barrel file: re-export every helper so consumers can import from one path.
export * from './encrypter'
export * from './hasher'
export * from './remove-image'
|
// Format allows for conditional expansion in printf statements
// based on the token and flags used.
func (uuid UUID) Format(s fmt.State, verb rune) {
switch verb {
case 'v':
if s.Flag('+') {
fmt.Fprint(s, uuid.ToFullString())
return
}
fmt.Fprint(s, uuid.String())
case 's':
fmt.Fprint(s, uuid.String())
case 'q':
fmt.Fprintf(s, "%b", uuid.Version())
}
} |
<reponame>StefanDimitrovDimitrov/Internship
from django.template import Library
from Internship.internship_app.forms import SearchForm
from Internship.internship_profiles.models import CompanyProfile
# Template-tag registry; Django's template loader looks this up as `register`.
register = Library()
@register.inclusion_tag('tags/search_company.html', takes_context=True)
def search_company(context):
    """Render the company-search widget for the current request's query.

    Looks up completed company profiles whose name contains the search
    text (case-insensitive), ordered alphabetically by name.
    """
    query = extract_filter_values(context.request.GET)
    matches = (
        CompanyProfile.objects
        .filter(is_complete=True)
        .filter(company_name__icontains=query['text'])
        .order_by('company_name')
    )
    return {
        'count_result': matches.count(),
        'companies': matches,
        'search_form': SearchForm(initial=query),
        'params': query['text'],
    }
def extract_filter_values(params):
    """Normalise raw query parameters into the filter dict used by the tag.

    Args:
        params: a mapping (e.g. ``request.GET``) that may contain ``'text'``.

    Returns:
        dict with a single key ``'text'``; empty string when absent.
    """
    # Idiomatic replacement for the `x if k in d else default` pattern.
    return {'text': params.get('text', '')}
|
Active learning for medical code assignment Machine Learning (ML) is widely used to automatically extract meaningful information from Electronic Health Records (EHR) to support operational, clinical, and financial decision-making. However, ML models require a large number of annotated examples to provide satisfactory results, which is not possible in most healthcare scenarios due to the high cost of clinician-labeled data. Active Learning (AL) is a process of selecting the most informative instances to be labeled by an expert to further train a supervised algorithm. We demonstrate the effectiveness of AL in multi-label text classification in the clinical domain. In this context, we apply a set of well-known AL methods to help automatically assign ICD-9 codes on the MIMIC-III dataset. Our results show that the selection of informative instances provides satisfactory classification with a significantly reduced training set (8.3\% of the total instances). We conclude that AL methods can significantly reduce the manual annotation cost while preserving model performance. Introduction Extraction of clinical information from unstructured data to support operational, clinical, and financial decision making is typically manually conducted based on heuristics and previous knowledge and expertise. This manual process leads to an inefficient way of delivering care to patients in the health system. In this context, many researchers have been studying and applying various Machine Learning (ML) techniques to automatically extracting valuable insights from this unstructured data and extract diagnosis and procedure insights from clinical notes and images. ML techniques performed well on various feature extraction, classification, and regression tasks. 
One issue of the application of ML models is that they require a considerable amount of labeled and stratified training data to ensure robust learning, which is not always possible to obtain in the medical domain, which usually lacks labeled datasets, contains rare events, and is difficult to access due to privacy. The lack of labeled data occurs because the annotation costs are high since domain experts are needed to manually label the data (like doctors, nurses, or health administrators). The interaction of an expert human in the learning process is known as human-in-the-loop machine learning (HILML), in which the expert interacts with the training process to enhance the information extraction and the optimization performance. This approach can reduce the exponential search in the functions space and feature selection. HILML techniques allow the expert to analyze and explore the data space and the features extracted based on the impact of changes in the prediction, identifying findings and insights that can support and improve the ML performance. In healthcare, a commonly used HILML technique is Active Learning (AL), which has been applied to clinical text data to reduce the annotation cost, i.e., selecting the most relevant notes that require less effort to be annotated. AL is typically used when the training dataset is small due to the difficulty of labeling samples, requiring a human annotator throughout the learning process to improve the model. Currently, Deep Learning models have been providing good results in the medical domain on clinical text. However, most studies involving AL with Deep Learning models have focused on image datasets, leaving the case of text as an open problem. Thus, this research aims to evaluate the possibility of employing AL methods in the medical domain, focusing on code assignment for clinical notes. 
Therefore, we focus on the application of the most common active learning methods in the clinical notes of MIMIC-III database under the discharge summaries category. In this scenario, the goal is automatically to assign the ICD-9 (the Ninth Revision of International Classification of Diseases) codes based on the content of the notes. It is worth mentioning that, due to the domain aspects, the data is imbalanced and follows a power-law distribution with respect to the number of samples that correspond to each class. Besides, the assignment of ICD-9 codes is a multi-label task, i.e., the sample can be associated with one or more labels or classes. Such aspects motivate this research, because AL methods can select the most informative samples to be labeled, reducing the annotation cost, and provide a better data balance for the training set. Besides, AL can support data exploration and model interpretation, bringing information for the user about the model design and data that present uncertainty. In that sense, researchers in the visual analytics field are proposing methods that can support the interpretation of ML models in order to provide a better understanding of the data and model behavior, supporting an exploratory analysis. These tools can be combined with Active Learning models to facilitate the interaction of non-ML users with the system, improving the interpretability of AI/HIML systems in medicine. Related Work Some approaches have been proposed to support automatically assign the ICD-9 codes using clinical notes of MIMIC-II and MIMIC-III, in which deep learning proves to be the most efficient technique. Initially, machine learning methods were applied to tackle the assignment of ICD-9 codes, in which the use of hierarchical and flat Support Vector Machines (SVM) for MIMIC-II database was proposed by Perotte et al., who concluded that the hierarchical nature of ICD-9 codes improves the multi-label classification. 
In sequence, deep recurrent networks outperform other machine learning methods applied in this context as provided by, whose results indicate that recurrent networks capture better information as vector representations. Also, the authors claimed that a better study of the medical vocabulary could improve the assignment of ICD-9 codes. Therefore, Convolutional Neural Networks (CNNs) started to be applied in literature, improving the results in the medical domain. In this context, the DeepLabeler, proposed by Li et al., is composed of a CNN with doc2vec, showing that the convolution operation was the most effective component. Therefore, researchers started to include different operations in the CNN, such as attention mechanism and dilated convolutions, in order to improve the predictions. To provide a benchmark for the prediction of ICD-9 codes in the MIMIC-III database, deep learning, and machine learning methods were evaluated by Huang et al., who analyzed different combinations between two vectorization methods and six different classifiers. In summary, all these approaches require a large amount of labeled data to ensure effective learning of the machine and deep learning models. However, code assignment of clinical notes is performed manually, which is expensive, time-consuming, and inefficient. Thus, two potential strategies for dealing with a small and/or imbalanced training set: i) semi-supervised learning, which considers the data distribution for propagating label information through neighboring samples; and ii) Active Learning (AL), which selects the most informative samples for a human agent to label next based on given criteria. In the medical domain, it is essential to include the domain expert (e.g., medical coder) in the process in order to provide a more accurate coding of the clinical notes. 
This approach motivates the use of Human-in-Loop strategies, which can be combined with AL methods, once the specialist can provide the labels of the most informative samples selected. The selection of the most informative samples can reduce the manual cost to label the samples and also provide a good representation of the data distribution, supporting the classifier model to achieve a useful generalization, leading to less complex structures. In this context, a survey about AL for named entity recognition in clinical text was provided by Chen et al., in which they confirm that AL methods reduce the annotation cost for each sentence. Such reduction was also confirmed by other researchers, whose evaluated AL methods in different tasks, such as classification of clinical text, identification of possible personally identifiable or sensitive sentences, and identification of breast cancer. In summary, the models combined with AL methods reduced around 60% of labeled documents required to provide satisfactory results, and 35% of annotation time performed by a specialist. The sample selection is typically conducted based on an acquisition function that measures the uncertainty of the model when predicting unlabeled samples. If the model produces a high uncertainty for a particular instance, it means that the model has not enough knowledge to classify that instance, concluding that such an instance is good to incorporate in the training set. Thus, the design of the acquisition function to evaluate the most informative samples to be labeled is the main challenge in the AL field. However, depending on the domain, an issue with the uncertainty metrics is that noise and outliers can be selected, jeopardizing the learning process of the model. Therefore, another strategy to select the most informative samples is to apply feature correlation derived from the semi-supervised learning algorithms. 
Thus, the idea is that grouping the samples based on their similarities can support the selection of a representative instance for a subset of samples, avoiding outliers instances. In the case of deep learning algorithm, some other approaches were developed. For instance, the Learning Loss AL includes a loss prediction module in the learning model to predict the loss of unlabeled samples Yoo and Kweon. Another DL technique is the Bayesian Active Learning by Disagreement (BALD) developed by Houlsby et al.. Some studies concluded that BALD does not perform well with an imbalanced test set and in the first few samples. Finally, in the medical domain, a deep learning algorithm combined with AL strategies was applied to tackle the problem of medical symptoms recognition from patient text by Mottaghi et al., whose employed clustering methods, selecting the samples closest to the centroid. They conclude that such a strategy can balance the training set, selecting equally samples associated with different labels. In this context, to the best of our knowledge, AL strategies applied in the medical domain for coding assignment have not been deeply explored, motivating this study to evaluate the performance of common AL strategies combined with well-known classifiers applied in MIMIC-III dataset. Methodology In supervised machine learning, algorithms must induce some function f : X → Y that minimizes the classification/regression error for a given learning task, producing a classifier in some space F (algorithm bias). Such algorithms consider a training set composed of independent samples that are selected from the joint probability distribution P (X Y). Each instance is defined as (x i, y i ) ∈ (X, Y), in which x i is a vector that represents the attributes and y i is its correspondent label for an i instance. Typically, an instance is associated with a single label, which is the case of binary and multi-class classification. 
However, in the context of the coding assignment, each instance is associated with one or more labels, known as multi-label classification. In this scenario, y is also represented by a vector containing the label's information. The databases for such a task commonly have a small number of labeled clinical documents due to the cost of manually labeling them by the medical specialists. Also, these databases are imbalanced. Therefore, the database contains more samples for some labels than others. In fact, in the MIMIC-III dataset used in this evaluation, the data is highly unbalanced, containing no examples for almost 50% of ICD-9 codes. Such an imbalanced and small number of labeled clinical notes can jeopardize the learning in supervised algorithms. These issues motivate the application of Active Learning (AL), which typically receives a set of labeled instances D l = (x 0, y 0 ), (x 1, y 1 ),, (x n, y n ) and huge number of unlabeled instances D u = (x 0, y 0 ), (x 1, y 1 ),, (x m, y m ). Thus, the goal of AL is to select the most informative instances to be manually label by a specialist and be incorporated in the training set, because, in most cases, the training set has not enough data to learn each label. This selection can avoid the time consuming of manual annotation and provide a good representation of the joint probability distribution P (X Y), i.e., a stratified and enough samples to conduct the learning. Acquisition functions (e.g., evaluation metrics) are applied to measure the relevance of instances in D u to select the most informative one. The typically two strategies applied are uncertainty metric and correlation, in which the former evaluates how much the model is uncertain about a particular instance, while the latter measures the similarity among instances. 
Therefore, we applied Least Confidence (LC) and Binary Entropy measures as uncertainty metrics and kmeans++ as a feature correlation strategy in the MIMIC-III database to evaluate the performance of AL in the coding assignment. Frameworks When working with text data, the first step as data mining is to convert the text into numeric vectors. As we are following the Huang et al. 's frameworks, we used Term Frequency-Inverse Document Frequency (TFIDF) and Word2Vec with Continuous Bag-of-Words Model (CBOW) in this study. TFIDF attempts to represent the importance of words in the document. Such a method extracts words that appear often in a document. However, it is not common in the collection under analysis. TFIDF is commonly used as a feature extractor in the MIMIC database, which is followed by a machine learning technique in order to perform the coding assignment. On the other hand, Word2Vec employs neural networks to convert words into numeric vectors keeping the semantic relationship among them. In the coding assignment of the MIMIC database, Support Vector Machine (SVM), Logistic Regression, and Random Forest are the most employed machine learning techniques. However, they were outperformed by deep learning models, such as Feed-forward Neural Network (FNN), Convolutional Neural Networks (CNN), and Recurrence Neural Networks (RNN). Typically, the multi-label task seems to be a binary classification problem, in which a model is created for each ICD-9 code under analysis. Then, the model will classify if the clinical note is associated or not with this particular code. Thus, in machine learning models, a model is trained for each label, while, for deep learning models, each neuron in the output layer will represent a label and provide a probability for each one. A benchmark to serve as a baseline for new approaches for this task was proposed by Huang et al., which we will use to evaluate the efficiency of AL in the coding assignment. 
They employed Logistic Regression and Random Forest, in which Logistic Regression provided the best results. Logistic regression estimates the probability of samples belonging to a class or not, a binary classification task. On the other hand, Random Forest is an ensemble learning method that combines the classification provided by a set of non-correlated decision trees in order to avoid overfitting and reduce the variance. In the context of deep learning, they employed Convolutional Neural Network (CNN), Feed-Forward Neural Network (FNN), Long Short-Term Memory Neural Network (LSTM), and Gated Recurrent Unit Neural Network (GRU). So, we started employing the FNN, which is also known as Multilayer Perceptron (MLP), using backpropagation to update the parameter of the network. One challenge faced by researchers is to identify the best design of the network to produce a good classification. In this aspect, Huang et al. employed a FNN with 3 hidden layers, having 5000, 500, and 100 neurons respectively, while Nigam employed 2 hidden layers with 300 and 100 neurons respectively. In both cases, they employed ReLU as activation function in hidden layers, sigmoid function in the output layer, binary cross-entropy as the loss function, and stochastic gradient descent as the optimizer. In case of top 100 ICD-9 codes, Nigam uses 1000 and 1000 neurons. Active Learning methods In this scenario, we started employing the uncertainty metric, in which the model predictions are used to evaluate the unlabeled instances, whose predicted results are represented by vectors that contain the posterior probability of each label. We applied Binary Entropy and Least Confidence measures as acquisition functions to select the most informative sample. Least confidence measures the uncertainty of instances using the higher posterior probability produced by the model among all labels under analysis. 
Thus, in the case of multi-class classification, the highest posterior probability provided the most likely label that the instance should be associated with. However, we are dealing with multi-label classification that is transformed into a binary classification. Then, if the posterior probability provided is closest to 0.5, the model is uncertain about the classification of that instance. It is worth to mention that, when the task is multi-label classification, a threshold of the probability is set according to the domain under analysis. The Least Confidence LC(x k ) measure of an instance k is defined in equation 1, in which p (y|x k ) corresponds to the posterior probability of instance x k to belongs to class y. The closer the Least Confidence measure is to 0, the more uncertain the model is about that particular instance. The entropy introduced by Shannon also measures the level of uncertainty of samples obtained from a probability distribution, i.e. the randomness of the chance variable. Thus, the lower is the entropy, the more certain the model about its prediction. The Binary Entropy H is used to evaluate the uncertainty of binary classification, i.e., the prediction can be only 0 or 1. Therefore, we applied the Binary Entropy to measure the uncertainty of each clinical note that it belongs to an ICD-9 code. The Binary Entropy H(x k ) of an instance k is defined in equation 2, in which p (y|x k ) corresponds to the posterior probability of instance x k to belongs to class y, that was produced by the model. The closer the Binary Entropy measure is to 0, the more confident the model is about that particular instance. As it is still a multi-label task, one challenge was how to combine the uncertainty measures obtained by each label. Then, we decided to apply the arithmetic mean and the mode operations to combine the uncertainty measure of an instance for all labels under analysis. 
In this scenario, we expected that, with the arithmetic mean operation, the instances selected are the ones that tend to be uncertain. While, with mode operation, the instances selected will be the ones that present uncertainty in the most labels since it returns the value that appears most often. On the other hand, the correlation strategy selects the most informative instances based on semi-supervising algorithms. In this scenario, the clustering process is suitable for an acquisition function because it did not require the label information, grouping instances based on their similarity, which results in subsets of instances with related features. Thus, the selection of a few instances of each group can provide a good representation of the input space for the model to learn the probability distribution. In this case, we applied the kmeans++ to cluster the clinical notes features, following by an instance selection inside each cluster. One challenge faced in this scenario is which distance metric should be used. We decided to start with Euclidean distance because it is the most typically applied. For instance selection, we evaluate three different methods: i) random selection, ii) select the ones closest to the centroid, and iii) select the ones closest to the border, i.e., most far from the centroid. Finally, we attempt to combine both measures in two different ways, the Two-Stage (TS) metric and the Weighted Uncertainty Metric (WUM). In the TS method, the goal is to realize a previous selection on the overall instances in the set D u before applying the uncertainty metric. Thus, we applied the kmeans++ and randomly selected a defined number of instances of each cluster to explore different regions in the input space. Next, the uncertainty measure is employed to evaluate these instances selected. In the case of WUM, a similarity measure is applied as a weight for the relevance obtained with the uncertainty measure. 
Such a measure should be the average similarity over all instances in the set D u. However, due to the high computational cost, this distance is calculated between the instance and the centroid of the cluster. Thus, the instances are grouped using Kmeans++ as well, and the distance between the instance and the centroid is used as a weight for the uncertainty metric. Experiments The Medical Information Mart for Intensive Care (MIMIC-III) is a large medical database that contains more than a decade of information about admitted patients in critical care units at the Beth Israel Deaconess Medical Center in Boston, Massachusetts. MIMIC-III comprises different types of information, such as medications, observations, notes charted by care providers, procedure codes, diagnostic codes, and imaging reports. Patients in this database are de-identified, allowing their free access to international researchers under a data use agreement. The clinical notes of the MIMIC-III database are divided into categories, in which the "discharge summary" is employed in this experiment. The goal of this task is to identify the ICD-9 code of each clinical note automatically. To tackle this task, we selected similar methods in the literature to apply active learning to, which include Random Forest, Logistic Regression, FNN, and CNN. Initially, the clinical notes are cleaned, in which stopwords, punctuation, and de-identifiers are removed, and all text is transformed to lower case. Next, the vectorization method is applied to convert the text into numeric vectors, in which TFIDF and Word2Vec are used as employed in the Huang et al. 's framework. As the vectorization did not require the label information, this process is conducted before the selection of instances. In Active Learning, random selection is used as the baseline, so we used random selection and 's framework results as baselines, which will be called as the benchmark. 
The acquisition function used was Binary Entropy with mean and with mode, Least Confidence with mean and with mode, kmeans++ with the three variations (random, center, border), average linkage with three variations (random, center, border), and a two-stage strategy, including entropy and Least Confidence with mean combined with kmeans++. Such functions are applied to the feature space produced by the vectorization method. As the MIMIC-III clinical notes are an imbalanced dataset, we are using the division provided by Mullenbach et al. to have the training, validation, and test sets, allowing the experiments to be compared. Therefore, the training set is used as the instances pool for the active learning selection, while the test set is used to measure the model performance after training in the selected samples. The number of clinical notes for each set according to division from Mullenbach et al. for each top ICD-9 code under analysis is presented in table 1. It is worth mentioning that we do not consider clinical notes that do not have labels in the top list under analysis. Setting up The most parameters used in this experiments was the same that provide the better results reported in benchmark. However, we modified the models to make them less complex, because AL has a small amount of data for the training process, and complex models may overfit the data. In the case of Random Forest, we used 30 trees as the benchmark settings. However, we also set up the maximum depth as 500. For Logistic Regression, we used the Limited-memory Broyden-Fletcher-Goldfarb-Shanno algorithm (LBFGS) to estimate the parameters because it is adequate for largescale problems, i.e., that contains many variables. Next, the architecture used for the FNN method is composed of 2 hidden layers, having 500 and 100 neurons, respectively, with ReLU activation function. 
A sigmoid function was used in the output layer, in which the number of neurons is the same as the number of labels under analysis. In each iteration of the experiments, 10 samples were selected using a particular acquisition function, excepted for the first iteration in which random samples were selected. We set the maximum number of iterations to 300, leading to a training set of 3000 clinical notes in the last iteration. The evaluation for the training set used in the experiments was conducted using just the samples selected to train the model, while the evaluation for the test set was performed on the entire test set. Top 10 ICD-9 codes Considering the top 10 ICD-9 codes, we evaluated different acquisition functions using the Feed-forward Neural Network (FNN) to understand the behavior of each acquisition function in the feature space. The F1 measure (micro) for each acquisition function is presented in fig. 1. All measures obtained is shown in table 2 for 3000 instances selected. All acquisition functions outperformed the results reported in the benchmark. In this scenario, the uncertainty metrics provided lower or similar results than the random selection. Although the mean operation provided better results, the mode and mean operation do not result in a significant difference. We believe that this behavior is because the uncertainty points are too closed to the hyperplanes margin, representing the overlap and outliers points of each class, making the region definition for the class representation difficult. On the other hand, some configurations using the clustering method outperform the random selection, improving the recall and precision measures. The selection of the samples farthest from the centroid (border) performed worse than the random and center selection. 
We believed that this occurs because the border samples are positioned in the overlap of the probabilities distributions of each label, which may represent an outlier, jeopardizing the model learning. The selection of the samples closest to the centroid (center) performed worse than the random selection. We believed that the center samples are a good representation of the local region, allowing the model convergence. However, these samples are not enough to provide a good representation of the probability distribution. These behaviors explain why the random selection points inside each cluster performed better because, with random selection, samples in the middle of the region could be selected, providing a good representation of the class region. In summary, some acquisition functions with FNN outperformed the results reported in the benchmark and the random selection. On the benchmark, they reported that FNN achieved an F1-measure (micro) of 0.53, while the random selection achieved 0.61 with 3000 instances. In this scenario, the best result obtained was 0.65 with kmeans++ (10 groups) with a random selection and 3000 instances. We believed that the results were better than the benchmark because a most simple architecture combined with a suitable selection of the most informative points could better represent the input space, leading to better learning. Moreover, these results indicate that instances selection based on the features correlations is more suitable for FNN classifier because the uncertainty metric selection provided similar results with the random selection. Next, we did the same evaluation using Random Forest classification. The F1 measure (micro) for each acquisition function is presented in fig. 2. All measures obtained is shown in table 3 for 3000 instances selected. 
All acquisition functions outperformed the results reported in the benchmark, in which mean operation provided a better recall measure, while mode operation provides a better precision measure. In this scenario, some acquisition functions with Random Forest outperformed the results reported in the benchmark and the random selection. The benchmark reported that Random Forest achieved an F1-measure of 0.32, while the random selection achieved 0.44 with 3000 instances. Thus, the best result obtained with AL strategies was 0.5 using kmeans++ with random selection inside each cluster (10 groups) and 3000 instances. We believed that the results reported were higher than the benchmark due to the definition of the depth parameter of the classifiers. Moreover, a suitable selection of the most informative instances for training set improved the representation of the probability distribution and improved the learning. In summary, the combination of both strategies is a good fit for the Random Forest classifier, although individual strategies also produce satisfactory results. We also executed the same analysis using Logistic Regression, in which the F1 measure (micro) for each acquisition function is presented in fig. 3. All measures obtained is shown in table 4 for 3000 instances selected. All acquisition Table 2: Measures obtained using different acquisition functions with FNN for coding assignment of top 10 ICD-9 codes with 3000 instances selected gradually. In this scenario, Prec. is Precision, F1 is the F1-measure, BE represents the Binary Entropy, LC is the Least Confidence, W corresponds to the weighted uncertainty metric, TS is the Two-Stage method, R represents the random selection, C corresponds to the selection of points closest to the centroid, and B is the selection of points far from the centroid. All representation is followed by the number of clusters analyzed. functions outperformed the results reported in the benchmark. 
In this scenario, some acquisition functions with Logistic Regression outperformed the results reported in the benchmark for the test set but provided lower measures on the training set, except in precision measurement. However, this behavior on the training set indicates that the Logistic Regression can provide a better generalization because the measure obtained on the training set is similar to the one obtained with the test set. In the benchmark, they reported that Logistic Regression achieved an F1-measure of 0.53, while the random selection achieved 0.5 with 3000 instances. Thus, the best result obtained with AL strategies was 0.64 using kmeans++ with random selection inside each cluster (10 groups) and 3000 instances. Those results indicate that uncertainty metrics and feature correlation can be suitable AL strategies for this task. However, the combination of both did not improve the results. Table 3: Measures obtained using different acquisition functions with Random Forest for coding assignment of top 10 ICD-9 codes with 3000 instances selected gradually. In this scenario, Prec. is Precision, F1 is the F1-measure, BE represents the Binary Entropy, LC is the Least Confidence, W corresponds to the weighted uncertainty metric, TS is the Two-Stage method, R represents the random selection, C corresponds to the selection of points closest to the centroid, and B is the selection of points far from the centroid. Each representation is followed by the number of clusters analyzed. Finally, we evaluated the AL strategies combined with CNN, composed of one embedding layer, two convolution layers interposed by a max-pooling layer, and an output layer that applies sigmoid as the activation function. The convolution layers have a 5 × 5 filter size, 128 neurons, and ReLU as the activation function, while the max-pooling layers were built with a 5 × 5 filter size without a stride. The loss function used was the binary cross-entropy, while the optimizer was Adam. 
The word2vec matrix obtained with the entire training set of the MIMIC-III dataset was employed as the values for the embedding layer, which was not trained with the CNN parameters. According to the results, the AL strategies could not provide similar results as the benchmark and could not outperform the random selection, indicating that these methods are not appropriate to select representative instances for the CNN classifier. However, the Binary Entropy with the mean operation improved the precision when compared to random selection. In the benchmark, they reported that CNN achieved an F1-measure of 0.64, while the random selection achieved 0.38 with 3000 samples. Thus, the best result obtained with AL strategies was 0.4 using Binary Entropy with mean operation and 3000 instances. However, this result was worse than the benchmark, and similar to random selection. Such behavior indicates that more studies are required to apply AL strategies with the CNN classifier, which produced an overfit for all configurations executed. In fact, this task requires a high-complexity CNN model and, consequently, a large number of instances. In summary, AL strategies combined with different classifiers provide good results for coding assignments with 10 labels on the MIMIC-III database. This evaluation indicates that each classifier requires a different instance selection due to its particular preference bias to provide a good model of the data. The best F1-measure reported in the benchmark was 0.64, achieved by the CNN. Applying the best configurations Considering the top 50 ICD-9 codes, we executed only the random selection as a baseline and the respective configuration that provided the best results, which was kmeans++ with random selection inside each cluster (10 groups), outperforming the random selection. All measures obtained are shown in table 7 for 3000 instances selected, except for the CNN because it did not provide good results with the AL strategies employed in those experiments. 
In the case of the FNN classifier, all configurations executed outperformed the results reported in the benchmark, however only the clustering methods as acquisition function provided better results than the random selection. In the Random Forest classifier, all configurations executed outperformed or produced similar results like the ones reported in the benchmark and the random selection. The Logistic Regression combined with AL strategy also produced better results compared with the random selection and the benchmark. It is worth mentioning that Logistic Regression requires a set of instances representing all labels, so this algorithm starts with a random selection of 50 samples that represents all labels. Conclusions In this research, we evaluate the possibility of employing the most common active learning methods in the medical domain, focusing on coding assignment the clinical notes under the discharge summaries category. Typically, the data in this scenario is imbalanced due to the domain aspects, and, as the notes are manually annotated, the cost of coding the data is too high. Such aspects motivate the use of Active Learning methods because it can select the most informative samples to be labeled, reducing the annotation cost, and provide a better data balance for the training set. According to the results, AL methods combined with the classifier models produced similar results to those reported in the literature, indicating that the selection of the most informative samples provides a good representation of the overall training set. The baseline for AL methods is random selection, which was outperformed by most of the techniques employed for different classifiers. Such results confirmed that AL methods could be used in the medical domain for coding assignment, keeping the results already achieved, and reducing the annotation cost because just a few documents have to be manually labeled. 
Besides, we noticed that using mode operation to combine the label information can improve the precision measure, while the mean operation enhances the recall. Thus, according to the scenario, this can be modified to satisfy the goals of the task under analysis. One limitation of the application of AL is that the analysis of every unlabeled instance can be time-consuming. Besides, each model has a different search bias, indicating that other DL models may provide better results with a different approach. As future work, studies on AL with Bert models for coding assignment are required, once these models present the best results when dealing with the MIMIC-III database in a high dimensional labels space. However, as more complex is the model, more labeled instances are required to ensure the learning. Moreover, a better analysis of the clustering algorithms should be conducted, once the kmeans++ provided better results for the most scenarios analyzed. In this context, different clustering algorithms and distance metrics should be evaluated to improve the results produced by AL methods. Finally, visual analysis to interpret the model behavior or continuous learning to deal with situations that do not have all the label information is a possible mechanism to improve the medical domain tasks. Acknowledgments This work was supported by Mitacs Accelerate Program and the Semantic Health Company as the partner organization. |
# Generated by Django 2.1.7 on 2019-05-12 06:28
from django.db import migrations


class Migration(migrations.Migration):
    # Merge migration: reconciles two divergent migration branches
    # (0028_auto_20190511_0723 and 0024_fix_scan_info_content) so Django
    # sees a single leaf. No schema changes, hence the empty operations.

    dependencies = [
        ('device_registry', '0028_auto_20190511_0723'),
        ('device_registry', '0024_fix_scan_info_content'),
    ]

    operations = [
    ]
|
Sirolimus in the Management of Blue Rubber Bleb Nevus Syndrome: A Case Report and Review of the Literature Blue rubber bleb nevus syndrome (BRBNS) is a rare multifocal venous malformation (VM) that may affect any tissue or organ but mainly affects the skin, subcutaneous tissue and gastrointestinal (GI) tract. Patients present with serious anemia, treated with lifelong iron supplements and frequent blood transfusion secondary to chronic GI bleeding. Variable therapeutic modalities were used in the management of BRBNS; sirolimus (SRL), a mammalian target of rapamycin (mTOR) inhibitor, is found to be a promising therapy for vascular anomalies. Introduction Blue rubber bleb nevus syndrome (BRBNS) is a rare multifocal venous malformation (VM) that may affect any tissue or organ, but mainly affects the skin, subcutaneous tissue and gastrointestinal (GI) tract. BRBNS might present at birth (30%), infancy (9%) or early childhood (48%). The syndrome is sometimes called Bean syndrome, after William Bean, who in 1958 was the first to characterize the lesions as compressible blue or purple cutaneous lesions with a diameter of 1-2 cm 2 that are asymptomatic and rarely bleed spontaneously but bleed easily upon trauma. Patients with BRBNS can develop coagulopathy with low fibrinogen and high D-dimer. In addition, they present with serious anemia, which can be treated with lifelong iron supplements and frequent blood transfusions secondary to chronic GI bleeding. Some reported cases of BRBNS showed an autosomal dominant pattern of inheritance, but most cases are sporadic. Variable therapeutic modalities are used in the management of BRBNS, including antiangiogenic agents such as corticosteroids and interferon-alpha, octreotide, sclerotherapy and aggressive surgery [1,2,5,. According to multiple case reports, sirolimus (SRL), a mammalian target of the rapamycin (mTOR) inhibitor, has been found to be a promising therapy for vascular anomalies. 
Herein, we report the first Middle Eastern case in the literature: a patient with BRBNS that was successfully treated with sirolimus after the failure of several modalities of management. Case Presentation Our patient is an 18-year-old female who visited King Faisal Specialist Hospital and Research Center in 2004, where she was diagnosed with BRBNS involving the gastrointestinal tract, brain and skin. Her condition has been managed for many years with multiple modalities, including medical as well as chemotherapy (cyclosporine and vincristine) and interventional radiology performed by pediatric hematology oncology and radiology teams. Unfortunately, these measures failed to control her disease. In addition, because of gastrointestinal (GI) bleeding, the patient developed chronic anemia and since that time has been transfusion dependent on an almost weekly basis with no improvement of her hemoglobin. She would always present with low readings of WBC (2.0-3.8 10 9 //L), RBC (1.1-2.1 10 12 /L), hemoglobin (31-55 g/L) and platelets (112-140 10 9 /L). In 2012, the patient travelled abroad where she had multiple surgical procedures, including surgical resection, laser and sclerotherapy of GI lesions, that resulted in complete control of her disease with no requirement of blood transfusions since. Part of her preventive management plan was sirolimus 1 mg (0.7 mg/m 2 /day) PO, which resulted in improvement of her lab readings: hemoglobin (115 g/L), RBC (4.87 10 12 /L), platelets (176 10 9 /L) and WBC (4.21 10 9 /L). Since 2012, until her presentation to the dermatology clinic in 2017, the patient has been on sirolimus 1 mg PO daily. However, the patient started to develop new lesions over her back and feet beginning 1 year prior. Therefore, in May 2017, when she visited the dermatology clinic, we adjusted the dose of sirolimus to 2 mg (1.4 mg/m 2 /day). 
In subsequent follow-up visits, the patient's condition was improving with maintained hemoglobin and no development of new active lesions, except for low WBC (3.16 10 9 /L). Enhanced and non-enhanced magnetic resonance imaging (MRI) of the patient's head and neck were done in October 2017 and found that there was a 4-cm decrease in the size of the lesion on her lower left neck in comparison to her last MRI done in 2010. Discussion BRBNS is a rare vascular anomaly syndrome characterized by multifocal lesions consisting of venous malformations, which appear prominently in the GI tract, skin and soft tissues. As the patient grows, the number and size of lesions tend to increase. One of the persistent complications of BRBNS is iron deficiency anemias due to chronic bleeding from GI lesions, which patients commonly experience at an early age. Therefore, patients require lifelong iron replacement, and in severe cases, regular blood transfusions might be required because of chronic anemias. This was true in our case, in which the patient had very low hemoglobin (31-55 g/L) and was thus receiving PRBC on a weekly basis. Octreotide has been used in BRBNS, resulting in an improvement of symptoms with a decrease in transfusions over 12 months of follow-up, but repeated video capsule endoscopy revealed one case with no changes in the number and size of lesions. Antiangiogenic agents, such as steroids, interferon-alpha and propranolol, were used, but the lesions failed to reduce in size and number when steroids were used alone. Furthermore, when steroids and interferon-alpha were used together, the lesions regrew after treatment was discontinued. No convincing evidence has been reported for long-lasting effects from any pharmacological treatment. Laser photocoagulation and endoscopic removal have been tried, but without lifelong success. 
Surgical removal is another modality of bleeding management in BRBNS, but it has been discouraged because of its aggressiveness and the probability that the excised lesions would regrow. Sirolimus is a mammalian target of the rapamycin (mTOR) inhibitor. mTOR is a protein complex that contributes to several cellular processes, such as cell growth, cell survival and angiogenesis. Additionally, it has potent antineoplastic and immunosuppressive properties. With our patient, hemoglobin levels normalized after starting sirolimus, with no need for further blood transfusions. We observed no adverse effects, such as hyperlipidemia, mucositis, diarrhea, hepatic damage, azotemia, proteinuria, anemia, thrombocytopenia, and recurrent or severe systemic infection, except for neutropenia. The first published case report of sirolimus in the treatment of BRBNS was in 2012; it demonstrated the efficacy of sirolimus at low doses to treat skin lesions and reduce GI bleeding after the failure of numerous management modalities. In the past few years, multiple cases of BRBNS managed with sirolimus were reported to exhibit positive effects [1,12,. To our knowledge, our case represents the first case in the Middle East to show the efficacy of sirolimus in treating chronic iron deficiency anemia in BRBNS. Conclusion BRBNS is a rare multifocal VM that most commonly affects the skin, subcutaneous tissue and GI tract. Commonly used therapeutic modalities to treat BRBNS include corticosteroids, interferon-alpha, octreotide, sclerotherapy and aggressive surgery. However, these modalities often fail to produce long-lasting effects or are unfavorable due to their aggressive nature. |
/**
*
* Copyright 2008 - 2011
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
* @project loon
* @author cping
* @email javachenpeng@yahoo.com
* @version 0.1.3
*/
public abstract class LGame extends Activity {
/**
 * Maps a boxed wrapper instance to its primitive type token, falling back to
 * the object's own class for non-wrapper arguments. Used by register() to
 * build the parameter-type array for reflective constructor lookup, so that
 * constructors declared with primitive parameters can be resolved.
 *
 * @param o the argument object (must not be null)
 * @return the primitive TYPE constant for known wrappers, otherwise o.getClass()
 */
private static Class<?> getType(Object o) {
	if (o instanceof Integer) {
		return Integer.TYPE;
	} else if (o instanceof Float) {
		return Float.TYPE;
	} else if (o instanceof Double) {
		return Double.TYPE;
	} else if (o instanceof Long) {
		return Long.TYPE;
	} else if (o instanceof Short) {
		return Short.TYPE;
	} else if (o instanceof Byte) {
		// Fix: the original had a second, unreachable "instanceof Short"
		// branch here; Byte was evidently intended so byte-typed
		// constructor arguments resolve correctly.
		return Byte.TYPE;
	} else if (o instanceof Boolean) {
		return Boolean.TYPE;
	} else {
		return o.getClass();
	}
}
/**
 * Configures the engine from the supplied settings and reflectively
 * instantiates the given Screen class (optionally with constructor
 * arguments), then displays it. The instantiation is posted to the OS
 * handler so it runs on the proper thread.
 *
 * @param setting engine configuration (size, orientation, FPS, logo, ...)
 * @param clazz   the Screen subclass to instantiate and show
 * @param args    optional constructor arguments; their runtime types are
 *                mapped to primitives via getType() for constructor lookup
 */
public void register(final LSetting setting,
		final Class<? extends Screen> clazz, final Object... args) {
	this._listener = setting.listener;
	this.maxScreen(setting.width, setting.height);
	this.initialization(setting.landscape, setting.mode);
	this.setShowFPS(setting.showFPS);
	this.setShowMemory(setting.showMemory);
	this.setShowLogo(setting.showLogo);
	this.setFPS(setting.fps);
	if (clazz != null) {
		if (args != null) {
			// Snapshot the orientation now; the runnable executes later.
			final int currentOrientation = LSystem.screenActivity
					.getResources().getConfiguration().orientation;
			Runnable runnable = new Runnable() {
				@Override
				public void run() {
					try {
						final int funs = args.length;
						if (funs == 0) {
							// No-arg constructor path.
							setScreen(clazz.newInstance());
							applyOrientation(setting.landscape,
									currentOrientation);
							showScreen();
						} else {
							// Resolve a constructor matching the (primitive-
							// mapped) argument types and invoke it.
							Class<?>[] functions = new Class<?>[funs];
							for (int i = 0; i < funs; i++) {
								functions[i] = getType(args[i]);
							}
							Constructor<?> constructor = Class.forName(
									clazz.getName()).getConstructor(
									functions);
							Object o = constructor.newInstance(args);
							if (o != null && (o instanceof Screen)) {
								setScreen((Screen) o);
								applyOrientation(setting.landscape,
										currentOrientation);
								showScreen();
							}
						}
					} catch (Exception e) {
						e.printStackTrace();
					}
				}
			};
			LSystem.getOSHandler().post(runnable);
		}
	}
}

/**
 * Requests the activity orientation matching the desired landscape flag,
 * but only when the current configuration differs (avoids a needless
 * configuration change). Extracted from two identical inline copies in
 * register().
 *
 * @param landscape          true if landscape orientation is desired
 * @param currentOrientation the Configuration.ORIENTATION_* value observed
 *                           when register() was called
 */
private static void applyOrientation(boolean landscape, int currentOrientation) {
	if (landscape
			&& currentOrientation != android.content.res.Configuration.ORIENTATION_LANDSCAPE) {
		LSystem.screenActivity
				.setRequestedOrientation(android.content.pm.ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
	} else if (!landscape
			&& currentOrientation != android.content.res.Configuration.ORIENTATION_PORTRAIT) {
		LSystem.screenActivity
				.setRequestedOrientation(android.content.pm.ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
	}
}
/**
 * Scaling strategies for attaching the game view to the screen (see
 * initialization(boolean, boolean, LMode)).
 * NOTE: the constant name "Defalut" is a long-standing typo that is kept
 * because it is part of the public API.
 */
public static enum LMode {
	Defalut, Max, Fill, FitFill, Ratio, MaxRatio
}

/**
 * Anchor/alignment positions used when adding child Views; mapped to
 * RelativeLayout rules via LSystem.createRelativeLayout (see addView).
 */
public static enum Location {
	LEFT, RIGHT, TOP, BOTTOM, BOTTOM_LEFT, BOTTOM_RIGHT, CENTER, ALIGN_BASELINE, ALIGN_LEFT, ALIGN_TOP, ALIGN_RIGHT, ALIGN_BOTTOM, ALIGN_PARENT_LEFT, ALIGN_PARENT_TOP, ALIGN_PARENT_RIGHT, ALIGN_PARENT_BOTTOM, CENTER_IN_PARENT, CENTER_HORIZONTAL, CENTER_VERTICAL;
}
// keyboardOpen: soft-keyboard visibility, updated in onConfigurationChanged();
// isDestroy: whether the whole program is force-closed on Activity destroy
// (see setDestroy).
private boolean keyboardOpen, isDestroy;
// Current Configuration.ORIENTATION_* value, updated in onConfigurationChanged().
private int orientation;
// The game view hosting the render loop; null until initialization().
private LGameView gameView;
// Root layout to which the game view and any user Views are attached.
private FrameLayout frameLayout;
// Optional lifecycle listener, notified from lifecycle callbacks (see onPause()).
private Listener _listener;
/**
 * Activity creation: records this Activity globally in LSystem, builds the
 * root FrameLayout, and invokes the subclass's onMain() via the UI thread.
 * If posting to the UI thread throws for any reason, the same setup is
 * repeated and onMain() is called directly on the current thread.
 */
@Override
public void onCreate(Bundle icicle) {
	super.onCreate(icicle);
	try {
		LSystem.screenActivity = LGame.this;
		LGame.this.frameLayout = new FrameLayout(LGame.this);
		LGame.this.isDestroy = true;
		Runnable runnable = new Runnable() {
			@Override
			public void run() {
				LGame.this.onMain();
			}
		};
		runOnUiThread(runnable);
	} catch (Throwable ex) {
		// Fallback: redo the initialization and run onMain() synchronously.
		LSystem.screenActivity = LGame.this;
		LGame.this.frameLayout = new FrameLayout(LGame.this);
		LGame.this.isDestroy = true;
		LGame.this.onMain();
	}
	Log.i("Android2DActivity", "LGame 2D Engine Start");
}
/**
 * Shows or hides the ActionBar. Accessed purely via reflection so the
 * class still loads on devices below API level 11, where the ActionBar
 * API does not exist. No-op on such devices.
 *
 * @param visible true to show the ActionBar, false to hide it
 */
public void setActionBarVisibility(boolean visible) {
	if (!LSystem.isAndroidVersionHigher(11)) {
		return;
	}
	try {
		final java.lang.reflect.Method getActionBar = Activity.class
				.getMethod("getActionBar");
		final Object bar = getActionBar.invoke(this);
		if (bar == null) {
			return;
		}
		final String action = visible ? "show" : "hide";
		bar.getClass().getMethod(action).invoke(bar);
	} catch (Exception ex) {
		ex.printStackTrace();
	}
}
/** Initializes the game view with the given orientation, using Ratio scaling. */
protected void initialization(final boolean landscape) {
	initialization(landscape, LMode.Ratio);
}

/** Initializes the game view, fullscreen, with the given orientation and scaling mode. */
protected void initialization(final boolean landscape, final LMode mode) {
	initialization(landscape, true, mode);
}

/**
 * Displays the game screen at the given size and orientation, using Ratio
 * scaling. (Javadoc translated from Chinese.)
 *
 * @param width     desired game width in pixels
 * @param height    desired game height in pixels
 * @param landscape true for landscape orientation
 */
protected void initialization(final int width, final int height,
		final boolean landscape) {
	initialization(width, height, landscape, LMode.Ratio);
}

/**
 * Displays the game screen at the given size, orientation and scaling mode.
 * (Javadoc translated from Chinese.)
 *
 * @param width     desired game width in pixels
 * @param height    desired game height in pixels
 * @param landscape true for landscape orientation
 * @param mode      scaling mode (see LMode)
 */
protected void initialization(final int width, final int height,
		final boolean landscape, final LMode mode) {
	maxScreen(width, height);
	initialization(landscape, mode);
}
/**
 * Creates the game view and attaches it to the layout according to the
 * chosen scaling mode, then (on API 11+) reflectively toggles the system
 * UI visibility on the decor view.
 *
 * @param landscape  true for landscape orientation
 * @param fullScreen whether the view should request fullscreen
 * @param mode       scaling mode deciding how the view is sized
 */
protected void initialization(final boolean landscape,
		final boolean fullScreen, final LMode mode) {
	if (!landscape && LSystem.MAX_SCREEN_HEIGHT > LSystem.MAX_SCREEN_WIDTH) {
		// Portrait requested but the stored maximum dimensions are
		// landscape-ordered: swap them.
		final int swapped = LSystem.MAX_SCREEN_HEIGHT;
		LSystem.MAX_SCREEN_HEIGHT = LSystem.MAX_SCREEN_WIDTH;
		LSystem.MAX_SCREEN_WIDTH = swapped;
	}
	this.gameView = new LGameView(LGame.this, mode, fullScreen, landscape);
	if (mode != null) {
		switch (mode) {
		case Defalut:
			// Exact requested size, centered.
			this.addView(gameView.getView(), gameView.getWidth(),
					gameView.getHeight(), Location.CENTER);
			break;
		case Fill:
			// Stretch across the whole screen
			// (0xffffffff is -1 as an int, i.e. FILL_PARENT), centered.
			this.addView(gameView.getView(), 0xffffffff, 0xffffffff,
					Location.CENTER);
			break;
		case Ratio:
		case MaxRatio:
		case Max:
		case FitFill:
			// All scaled modes attach at the maximum permitted size,
			// centered; the view itself applies the mode-specific scaling.
			this.addView(gameView.getView(), gameView.getMaxWidth(),
					gameView.getMaxHeight(), Location.CENTER);
			break;
		}
	}
	if (LSystem.isAndroidVersionHigher(11)) {
		// Reflective call keeps the code compiling against pre-11 SDKs.
		View rootView = getWindow().getDecorView();
		try {
			java.lang.reflect.Method setSystemUiVisibility = View.class
					.getMethod("setSystemUiVisibility", int.class);
			setSystemUiVisibility.invoke(rootView, 0x0);
			setSystemUiVisibility.invoke(rootView, 0x1);
		} catch (Exception ex) {
		}
	}
}
/**
 * Game entry point implemented by subclasses; invoked from onCreate()
 * (normally posted to the UI thread) to configure and start the engine.
 */
public abstract void onMain();
/**
 * Pops up a modal single-line text-input dialog.
 *
 * @param listener callback receiving the result (wrapped with flag 0 for
 *                 OK and flag 1 for cancel); nothing happens when null
 * @param title    dialog title
 * @param message  initial text placed in the input field
 */
public void showAndroidTextInput(final TextEvent listener,
		final String title, final String message) {
	if (listener == null) {
		return;
	}
	final LGameTools.ClickAndroid onOk = new LGameTools.ClickAndroid(
			listener, 0);
	final LGameTools.ClickAndroid onCancel = new LGameTools.ClickAndroid(
			listener, 1);
	final android.widget.EditText field = new android.widget.EditText(
			LGame.this);
	field.setText(message);
	field.setSingleLine();
	onOk.setInput(field);
	final android.app.AlertDialog.Builder dialog = new android.app.AlertDialog.Builder(
			LGame.this);
	dialog.setTitle(title);
	dialog.setView(field);
	dialog.setPositiveButton("Ok", onOk);
	dialog.setOnCancelListener(onCancel);
	dialog.show();
}
/**
 * Pops up a cancelable dialog rendering the HTML page at the given URL.
 *
 * @param listener click callback (wrapped with flag 0 for OK and flag 1
 *                 for cancel); nothing happens when null
 * @param title    dialog title
 * @param url      address of the page to load
 */
public void showAndroidOpenHTML(final ClickEvent listener,
		final String title, final String url) {
	if (listener == null) {
		return;
	}
	final LGameTools.ClickAndroid onOk = new LGameTools.ClickAndroid(
			listener, 0);
	final LGameTools.ClickAndroid onCancel = new LGameTools.ClickAndroid(
			listener, 1);
	final LGameTools.Web page = new LGameTools.Web(LGame.this, url);
	final android.app.AlertDialog.Builder dialog = new android.app.AlertDialog.Builder(
			LGame.this);
	dialog.setCancelable(true);
	dialog.setTitle(title);
	dialog.setView(page);
	dialog.setPositiveButton("Ok", onOk).setNegativeButton("Cancel", onCancel);
	dialog.show();
}
/**
 * Pops up a selection-list dialog.
 *
 * @param listener selection callback (wrapped with flag 0 for an item
 *                 click and flag 1 for cancel); nothing happens when null
 * @param title    dialog title
 * @param text     the list entries to display
 */
public void showAndroidSelect(final SelectEvent listener,
		final String title, final String text[]) {
	if (listener == null) {
		return;
	}
	final LGameTools.ClickAndroid onItem = new LGameTools.ClickAndroid(
			listener, 0);
	final LGameTools.ClickAndroid onCancel = new LGameTools.ClickAndroid(
			listener, 1);
	final android.app.AlertDialog.Builder dialog = new android.app.AlertDialog.Builder(
			LGame.this);
	dialog.setTitle(title);
	dialog.setItems(text, onItem);
	dialog.setOnCancelListener(onCancel);
	dialog.create().show();
}
/**
 * Pops up a yes/no confirmation dialog.
 *
 * @param title      dialog title
 * @param message    dialog body text
 * @param cancelable whether the dialog may be dismissed (e.g. via BACK)
 * @param yes        label of the positive button
 * @param no         label of the negative button
 * @param onYesClick positive-button callback
 * @param onNoClick  negative-button callback
 */
public void showAndroidYesOrNo(String title, String message,
		boolean cancelable, String yes, String no,
		android.content.DialogInterface.OnClickListener onYesClick,
		android.content.DialogInterface.OnClickListener onNoClick) {
	final android.app.AlertDialog.Builder builder = new android.app.AlertDialog.Builder(
			LGame.this);
	builder.setTitle(title);
	builder.setMessage(message);
	builder.setPositiveButton(yes, onYesClick);
	builder.setNegativeButton(no, onNoClick);
	builder.setCancelable(cancelable);
	// Fix: the original called builder.create() and discarded the result,
	// then builder.show() constructed a second dialog. show() both creates
	// and displays the dialog, so it alone suffices.
	builder.show();
}
/** Whether the hardware BACK key exits the game (inverse of LSystem.isBackLocked). */
public boolean isGamePadBackExit() {
	return !LSystem.isBackLocked;
}

/** Enables/disables exiting via the BACK key by toggling the global back-lock flag. */
public void setGamePadBackExit(boolean flag) {
	LSystem.isBackLocked = !flag;
}

/**
 * Inflates an XML layout resource into a View (with no parent attached).
 *
 * @param layoutID the layout resource id
 */
public View inflate(final int layoutID) {
	final android.view.LayoutInflater inflater = android.view.LayoutInflater
			.from(this);
	return inflater.inflate(layoutID, null);
}
/** Adds a View at the given anchor location with WRAP_CONTENT dimensions. */
public void addView(final View view, Location location) {
	if (view == null) {
		return;
	}
	addView(view, android.view.ViewGroup.LayoutParams.WRAP_CONTENT,
			android.view.ViewGroup.LayoutParams.WRAP_CONTENT, location);
}

/**
 * Adds a View sized w x h, wrapped in a RelativeLayout so the Location
 * anchor can be expressed as RelativeLayout rules.
 */
public void addView(final View view, int w, int h, Location location) {
	if (view == null) {
		return;
	}
	android.widget.RelativeLayout viewLayout = new android.widget.RelativeLayout(
			LGame.this);
	android.widget.RelativeLayout.LayoutParams relativeParams = LSystem
			.createRelativeLayout(location, w, h);
	viewLayout.addView(view, relativeParams);
	addView(viewLayout);
}

/** Attaches a View to the root FrameLayout and makes it visible. */
public void addView(final View view) {
	if (view == null) {
		return;
	}
	frameLayout.addView(view, createLayoutParams());
	try {
		if (view.getVisibility() != View.VISIBLE) {
			view.setVisibility(View.VISIBLE);
		}
	} catch (Exception e) {
		// Best effort: visibility-change failures are deliberately ignored.
	}
}

/** Detaches a View from the root FrameLayout and hides it. */
public void removeView(final View view) {
	if (view == null) {
		return;
	}
	frameLayout.removeView(view);
	try {
		if (view.getVisibility() != View.GONE) {
			view.setVisibility(View.GONE);
		}
	} catch (Exception e) {
		// Best effort: visibility-change failures are deliberately ignored.
	}
}
/**
 * Fake "ad" hook — a decoy meant to confuse novice users of decompilers.
 * (Original comment translated from Chinese.) Returns the number of fields
 * declared on LGame plus the argument's length; the value is meaningless.
 *
 * @param ad arbitrary string
 * @return declared-field count plus ad.length()
 */
public int setAD(String ad) {
	int result = 0;
	try {
		Class<LGame> clazz = LGame.class;
		java.lang.reflect.Field[] field = clazz.getDeclaredFields();
		if (field != null) {
			result = field.length;
		}
	} catch (Exception e) {
		// Ignored by design — the method is intentionally meaningless.
	}
	return result + ad.length();
}
/** Records the full device screen dimensions as the engine's maximum size. */
protected void maxScreen() {
	RectBox rect = getScreenDimension();
	maxScreen(rect.width, rect.height);
}

/** Records the given dimensions as the engine's maximum screen size. */
protected void maxScreen(int w, int h) {
	LSystem.MAX_SCREEN_WIDTH = w;
	LSystem.MAX_SCREEN_HEIGHT = h;
}

/**
 * Makes the root FrameLayout the Activity's content view and clears the
 * window background (best effort).
 */
protected void showScreen() {
	setContentView(frameLayout);
	try {
		getWindow().setBackgroundDrawable(null);
	} catch (Exception e) {
		// Ignored: clearing the background is a cosmetic step only.
	}
}
/**
 * Verifies that the hosting Activity declares
 * configChanges="keyboardHidden|orientation" in AndroidManifest.xml and
 * shows a warning dialog when it does not. Lookup failures are logged and
 * otherwise ignored.
 *
 * @param context context used to resolve this Activity's component name
 */
public void checkConfigChanges(android.content.Context context) {
	try {
		final int REQUIRED_CONFIG_CHANGES = android.content.pm.ActivityInfo.CONFIG_ORIENTATION
				| android.content.pm.ActivityInfo.CONFIG_KEYBOARD_HIDDEN;
		android.content.pm.ActivityInfo info = this.getPackageManager()
				.getActivityInfo(
						new android.content.ComponentName(context,
								this.getPackageName() + "."
										+ this.getLocalClassName()), 0);
		if ((info.configChanges & REQUIRED_CONFIG_CHANGES) != REQUIRED_CONFIG_CHANGES) {
			new android.app.AlertDialog.Builder(this)
					.setMessage(
							"LGame Tip : Please add the following line to the Activity manifest .\n[configChanges=\"keyboardHidden|orientation\"]")
					.show();
		}
	} catch (Exception e) {
		Log.w("Android2DView",
				"Cannot access game AndroidManifest.xml file !");
	}
}
/** Returns the root FrameLayout hosting the game view and any user Views. */
public FrameLayout getFrameLayout() {
	return frameLayout;
}

/**
 * Looks up this application's PackageInfo.
 *
 * @return the PackageInfo, or null when the lookup fails
 */
public android.content.pm.PackageInfo getPackageInfo() {
	try {
		return getPackageManager().getPackageInfo(getPackageName(), 0);
	} catch (Exception ex) {
		return null;
	}
}

/**
 * @return the manifest versionName, or null when package info is unavailable
 */
public String getVersionName() {
	final android.content.pm.PackageInfo info = getPackageInfo();
	return (info == null) ? null : info.versionName;
}

/**
 * @return the manifest versionCode, or -1 when package info is unavailable
 */
public int getVersionCode() {
	final android.content.pm.PackageInfo info = getPackageInfo();
	return (info == null) ? -1 : info.versionCode;
}
/**
 * Tracks configuration changes so the engine can expose the current
 * orientation and soft-keyboard visibility (see getOrientation() and
 * isKeyboardOpen()) without the Activity being restarted.
 */
@Override
public void onConfigurationChanged(android.content.res.Configuration config) {
	super.onConfigurationChanged(config);
	orientation = config.orientation;
	keyboardOpen = config.keyboardHidden == android.content.res.Configuration.KEYBOARDHIDDEN_NO;
}
/**
 * Builds full-size, centered layout params for children of the root
 * FrameLayout. 0xffffffff is -1 as an int, i.e. the classic
 * ViewGroup.LayoutParams.FILL_PARENT / MATCH_PARENT value.
 */
protected FrameLayout.LayoutParams createLayoutParams() {
	FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(
			0xffffffff, 0xffffffff);
	layoutParams.gravity = Gravity.CENTER;
	return layoutParams;
}
/**
* 设定常规图像加载方法的扩大值
*
* @param sampleSize
*/
public void setSizeImage(int sampleSize) {
LSystem.setPoorImage(sampleSize);
}
/**
* 取出第一个Screen并执行
*
*/
public void runFirstScreen() {
if (LSystem.screenProcess != null) {
LSystem.screenProcess.runFirstScreen();
}
}
/**
* 取出最后一个Screen并执行
*/
public void runLastScreen() {
if (LSystem.screenProcess != null) {
LSystem.screenProcess.runLastScreen();
}
}
/**
* 运行指定位置的Screen
*
* @param index
*/
public void runIndexScreen(int index) {
if (LSystem.screenProcess != null) {
LSystem.screenProcess.runIndexScreen(index);
}
}
/**
* 运行自当前Screen起的上一个Screen
*/
public void runPreviousScreen() {
if (LSystem.screenProcess != null) {
LSystem.screenProcess.runPreviousScreen();
}
}
/**
* 运行自当前Screen起的下一个Screen
*/
public void runNextScreen() {
if (LSystem.screenProcess != null) {
LSystem.screenProcess.runNextScreen();
}
}
/**
* 向缓存中添加Screen数据,但是不立即执行
*
* @param screen
*/
public void addScreen(Screen screen) {
if (LSystem.screenProcess != null) {
LSystem.screenProcess.addScreen(screen);
}
}
/**
* 切换当前窗体为指定Screen
*
* @param screen
*/
public void setScreen(Screen screen) {
if (LSystem.screenProcess != null) {
LSystem.screenProcess.setScreen(screen);
}
}
/**
* 获得保存的Screen列表
*
* @return
*/
public LinkedList<Screen> getScreens() {
if (LSystem.screenProcess != null) {
return LSystem.screenProcess.getScreens();
}
return null;
}
/**
* 获得缓存的Screen总数
*/
public int getScreenCount() {
if (LSystem.screenProcess != null) {
return LSystem.screenProcess.getScreenCount();
}
return 0;
}
/**
 * Registers the listener that receives on-screen emulator (virtual pad)
 * events; forwarded to the screen process when it exists.
 *
 * @param emulator the listener to register
 */
public void setEmulatorListener(EmulatorListener emulator) {
    if (LSystem.screenProcess == null) {
        return;
    }
    LSystem.screenProcess.setEmulatorListener(emulator);
}
/** Toggles the FPS overlay on the game view, if the view exists. */
protected void setShowFPS(boolean flag) {
    if (gameView == null) {
        return;
    }
    gameView.setShowFPS(flag);
}
/** Toggles the memory-usage overlay on the game view, if the view exists. */
protected void setShowMemory(boolean flag) {
    if (gameView == null) {
        return;
    }
    gameView.setShowMemory(flag);
}
/** Sets the target frame rate on the game view, if the view exists. */
protected void setFPS(long frames) {
    if (gameView == null) {
        return;
    }
    gameView.setFPS(frames);
}
/** Enables or disables the startup logo on the game view, if the view exists. */
protected void setShowLogo(boolean showLogo) {
    if (gameView == null) {
        return;
    }
    gameView.setShowLogo(showLogo);
}
/** Sets the texture used as the startup logo, if the game view exists. */
protected void setLogo(LTexture img) {
    if (gameView == null) {
        return;
    }
    gameView.setLogo(img);
}
/**
 * Reads the device display metrics and packs them into a RectBox.
 *
 * @return a RectBox built as (xdpi, ydpi, widthPixels, heightPixels)
 */
public RectBox getScreenDimension() {
    final DisplayMetrics metrics = new DisplayMetrics();
    getWindowManager().getDefaultDisplay().getMetrics(metrics);
    // NOTE(review): the first two RectBox values are densities (dpi), not
    // pixel coordinates — presumably intentional; confirm against RectBox usage.
    return new RectBox(metrics.xdpi, metrics.ydpi, metrics.widthPixels,
            metrics.heightPixels);
}
// Accessor for the underlying game view (may be null before it is created —
// the lifecycle callbacks below all null-check it).
public LGameView gameView() {
return gameView;
}
/**
 * Whether the soft keyboard is currently shown, as tracked by
 * onConfigurationChanged.
 *
 * @return true if the keyboard is visible
 */
public boolean isKeyboardOpen() {
return keyboardOpen;
}
/**
 * Current window orientation, as tracked by onConfigurationChanged.
 *
 * @return the last Configuration.orientation value seen
 */
public int getOrientation() {
return orientation;
}
/**
 * Exits the current application by finishing this Activity.
 */
public void close() {
finish();
}
// Whether the whole process is forcibly terminated when this Activity is
// destroyed (see onDestroy / setDestroy).
public boolean isDestroy() {
return isDestroy;
}
/**
 * Controls whether the whole process is forcibly shut down when this
 * Activity is destroyed (see onDestroy).
 *
 * @param isDestroy true to System.exit() on destroy
 */
public void setDestroy(boolean isDestroy) {
    this.isDestroy = isDestroy;
    if (isDestroy) {
        return;
    }
    // If the process is meant to outlive the Activity, lock BACK handling.
    LSystem.isBackLocked = true;
}
// Whether handling of the BACK key is currently locked (global LSystem flag).
public boolean isBackLocked() {
return LSystem.isBackLocked;
}
/**
 * Locks or unlocks handling of the BACK key.
 *
 * @param isBackLocked true to ignore BACK key events
 */
public void setBackLocked(boolean isBackLocked) {
LSystem.isBackLocked = isBackLocked;
}
// Pauses the engine: notifies the listener, pauses the game view, fires the
// onGamePaused() hook, destroys the view when the Activity is finishing, and
// forwards onPause to a Cupcake-era GLSurfaceView if that is the backing view.
@Override
protected void onPause() {
// Nothing to do before the game view exists.
if (gameView == null) {
return;
}
if (_listener != null) {
_listener.onPause();
}
gameView.pause();
onGamePaused();
// Release view resources only when the Activity is really going away.
if (isFinishing()) {
gameView.destroy();
}
// NOTE(review): gameView cannot be null here (early return above); the extra
// null check looks redundant — confirm before simplifying.
if (gameView != null && gameView.getView() != null) {
if (gameView.getView() instanceof GLSurfaceViewCupcake) {
((GLSurfaceViewCupcake) gameView.getView()).onPause();
}
}
super.onPause();
}
// Resumes the engine: notifies the listener, resumes the game view, fires the
// onGameResumed() hook, and forwards onResume to a Cupcake-era GLSurfaceView
// if that is the backing view. Mirrors onPause().
@Override
protected void onResume() {
// Nothing to do before the game view exists.
if (gameView == null) {
return;
}
if (_listener != null) {
_listener.onResume();
}
gameView.resume();
onGameResumed();
// NOTE(review): gameView cannot be null here (early return above); the extra
// null check looks redundant — confirm before simplifying.
if (gameView != null && gameView.getView() != null) {
if (gameView.getView() instanceof GLSurfaceViewCupcake) {
((GLSurfaceViewCupcake) gameView.getView()).onResume();
}
}
super.onResume();
}
/** Hook invoked from onResume() after the game view has been resumed. */
public abstract void onGameResumed();

/** Hook invoked from onPause() after the game view has been paused. */
public abstract void onGamePaused();
/**
 * Tears the engine down: clears the running flag, notifies the listener,
 * and — when isDestroy is set — forcibly terminates the whole process.
 * Failures during teardown are logged instead of silently swallowed
 * (the original empty catch blocks hid every error).
 */
@Override
protected void onDestroy() {
    try {
        LSystem.isRunning = false;
        if (_listener != null) {
            _listener.onExit();
        }
        super.onDestroy();
        // When isDestroy is true, forcibly shut down the whole process.
        if (isDestroy) {
            Log.i("Android2DActivity", "LGame 2D Engine Shutdown");
            try {
                this.finish();
                System.exit(0);
            } catch (Error err) {
                // Keep the original best-effort behavior, but record it.
                Log.w("Android2DActivity", "forced shutdown failed", err);
            }
        }
    } catch (Exception e) {
        // Never crash during teardown, but do not lose the failure either.
        Log.w("Android2DActivity", "onDestroy failed", e);
    }
}
/**
 * Gives the screen process first refusal on options-menu creation; falls
 * back to the superclass result otherwise.
 */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    final boolean handled = super.onCreateOptionsMenu(menu);
    if (LSystem.screenProcess != null
            && LSystem.screenProcess.onCreateOptionsMenu(menu)) {
        return true;
    }
    return handled;
}
/**
 * Gives the screen process first refusal on options-menu item selection;
 * falls back to the superclass result otherwise.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    final boolean handled = super.onOptionsItemSelected(item);
    if (LSystem.screenProcess != null
            && LSystem.screenProcess.onOptionsItemSelected(item)) {
        return true;
    }
    return handled;
}
/** Forwards the menu-closed callback to the screen process, if it exists. */
@Override
public void onOptionsMenuClosed(Menu menu) {
    super.onOptionsMenuClosed(menu);
    if (LSystem.screenProcess == null) {
        return;
    }
    LSystem.screenProcess.onOptionsMenuClosed(menu);
}
/**
 * Safeguard for ad views: if another view has been attached inside the ad
 * view's grandparent container (e.g. an ad-blocking overlay), remove it.
 * Best-effort only — any failure is deliberately ignored.
 *
 * @param view the ad view to protect
 */
public void safeguardAndroidADView(android.view.View view) {
    try {
        final android.view.ViewGroup container =
                (android.view.ViewGroup) view.getParent().getParent();
        if (container.getChildAt(1) != null) {
            container.removeViewAt(1);
        }
    } catch (Exception ignored) {
        // View hierarchy not in the expected shape; nothing to do.
    }
}
} |
//Calculates the average GPA of all students in each priority level
public void priorityGPA(ArrayList<Student> studentList) {
Double priorityLevelOneGPA = 0.0;
Double priorityLevelTwoGPA = 0.0;
Double priorityLevelThreeGPA = 0.0;
Double priorityLevelFourGPA = 0.0;
int levelOneStudents = 0;
int levelTwoStudents = 0;
int levelThreeStudents = 0;
int levelFourStudents = 0;
for(Student student : studentList){
if(student.getPriority() == 1){
priorityLevelOneGPA += student.getGPA();
levelOneStudents++;
} else if(student.getPriority() == 2){
priorityLevelTwoGPA += student.getGPA();
levelTwoStudents++;
} else if(student.getPriority() == 3) {
priorityLevelThreeGPA += student.getGPA();
levelThreeStudents++;
} else if(student.getPriority() == 4){
priorityLevelFourGPA += student.getGPA();
levelFourStudents++;
}
}
double totalGPAOne = Math.round((priorityLevelOneGPA / levelOneStudents) * 100);
double totalGPATwo = Math.round((priorityLevelTwoGPA / levelTwoStudents) * 100);
double totalGPAThree = Math.round((priorityLevelThreeGPA / levelThreeStudents) * 100);
double totalGPAFour = Math.round((priorityLevelFourGPA / levelFourStudents) * 100);
System.out.println("Average GPA of Priority 1 Students: " + totalGPAOne / 100 + "%");
System.out.println("Average GPA of Priority 2 Students: " + totalGPATwo / 100 + "%");
System.out.println("Average GPA of Priority 3 Students: " + totalGPAThree / 100 + "%");
System.out.println("Average GPA of Priority 4 Students: " + totalGPAFour / 100 + "%");
} |
<gh_stars>0
package Analysis;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
/** Loads the analysis input CSV into memory as raw string fields. */
public class FileReaderAnalysis {

    /**
     * Reads "test_file.csv" from the working directory and splits every line
     * on commas.
     *
     * @return one String[] of comma-separated fields per line, in file order
     * @throws IOException if the file cannot be opened or read
     */
    public ArrayList<String[]> FileReadForAnalysis() throws IOException {
        ArrayList<String[]> rows = new ArrayList<String[]>();
        // try-with-resources closes the reader even on error; the original
        // never closed it and leaked the file handle.
        try (BufferedReader br = new BufferedReader(new FileReader("test_file.csv"))) {
            String line;
            while ((line = br.readLine()) != null) {
                rows.add(line.split(","));
            }
        }
        return rows;
    }
}
|
AnCora-Verb: A Lexical Resource for the Semantic Annotation of Corpora In this paper we present two large-scale verbal lexicons, AnCora-Verb-Ca for Catalan and AnCora-Verb-Es for Spanish, which are the basis for the semantic annotation with arguments and thematic roles of AnCora corpora. In AnCora-Verb lexicons, the mapping between syntactic functions, arguments and thematic roles of each verbal predicate it is established taking into account the verbal semantic class and the diatheses alternations in which the predicate can participate. Each verbal predicate is related to one or more semantic classes basically differentiated according to the four event classes -accomplishments, achievements, states and activities-, and on the diatheses alternations in which a verb can occur. AnCora-Verb-Es contains a total of 1,965 different verbs corresponding to 3,671 senses and AnCora-Verb-Ca contains 2,151 verbs and 4,513 senses. These figures correspond to the total of 500,000 words contained in each corpus, AnCora-Ca and AnCora-Es. The lexicons and the annotated corpora constitute the richest linguistic resources of this kind freely available for Spanish and Catalan. The big amount of linguistic information contained in both resources should be of great interest for computational applications and linguistic studies. Currently, a consulting interface for these lexicons is available at (http://clic.ub.edu/ancora/). Introduction In this paper we present two large-scale verbal lexicons, AnCora-Verb-Ca for Catalan and AnCora-Verb-Es for Spanish, which are the basis for the semantic annotation with arguments and thematic roles of AnCora corpora. At present, AnCora ) is the largest multilevel annotated corpus of Spanish and Catalan consisting of 500,000 words each mostly from newspaper articles. 
AnCora is annotated with morphological (PoS), syntactic (constituents and functions) and semantic (argument structure and thematic roles, semantic class, named entities and WordNet senses) information. In AnCora-Verb lexicons, the mapping between syntactic functions, arguments and thematic roles of each verbal predicate it is established taking into account the verbal semantic class and the diatheses alternations in which the predicate can participate. Each verbal predicate is related to one or more semantic classes, depending on its senses. The main goal of this paper is to present the content of these lexicons and their resulting projection in the AnCora corpora (section 2). A quantitative analysis of the data it is also presented (section 3). Finally, main conclusions are drawn in section 4. AnCora-Verb Lexicons AnCora-Verb lexicons were obtained by deriving, for each sense of each verb, all the syntactic schemata in which a verbal predicate appears in AnCora corpora. From this information, the mapping from syntactic functions to thematic roles, and the corresponding argument position, was fully manually encoded in the lexicons. The semantic properties used in the characterization of predicates are based on the proposal of lexical decomposition of Rappaport-Hovav & Levin from which the concept of Lexical Semantic Structure (LSS) has been taken. For the characterization of the argument structure, we follow PropBank annotation system () 1. In this direction, we follow the lines laid down by Kingsbury et al., in the construction of VerbNet. In AnCora-Verb lexicons, each predicate is related to one or more semantic classes (LSS), depending on its senses, basically differentiated according to the four event classes ─accomplishments (A), achievements (B), states (C) and activities (D)─, and on the diatheses alternations in which a verb can occur. 
Figure 1 shows the full information associated with the entry reforzar 'to reinforce': the lemma (reforzar), the different senses associated to their corresponding semantic classes (in this case LSS1.1 and LSS2.2), the mapping between syntactic function and thematic role (for instance, SUJ Arg0##CAU), and the diatheses alternations in which the verb occurs (in this case, ANTICAUSATIVA 'inchoative'). As we can observe, the expression of the causative-inchoative alternation entails an argument crossing: the affected object, appears as direct object in the causative structure (CD Arg1##TEM) and as subject in the inchoative structure (SUJ Arg1##TEM). Furthermore, the expression of this alternation also involves an aspectual change, since the causative reading corresponds with an accomplishment (LSS1.1) and the inchoative reading with an achievement (LSS2.2). Finally, examples are also included. "La subida en dos dcimas de la tasa de paro reforz la tendencia al alza" 2 +ANTICAUSATIVA LSS2.2 (B2) SUJ Arg1##TEM EX: "Si dos neuronas se activan, sus conexiones se refuerzan" 3 Figure 1: Lexical entry of reforzar 'to reinforce' in AnCora-Verb-Es In order to guarantee the coherence and quality and to ensure the correct mapping between arguments, thematic roles, syntactic functions and LSS, inter-annotator agreement tests were carried out in the building process of the verbal lexicons. After a first proposal of verb classes and their corresponding arguments and theta-roles, a group of seven trained linguists elaborated a subset of 30 verbal entries. The resulting entries were compared, the disagreements discussed and the verb classes modified when necessary. This process was applied over several subsets of 30 verbs until no relevant disagreements arose. Disagreements were mainly due to differences in class assignment (LSS), and therefore also in the thematic role assignment. 
For example, in Spanish, a verb in a passive ('pasiva refleja') or inchoative ('anticausativa) construction can appear with the pronoun se, and it is not always easy to decide which of them the correct interpretation is and, obviously, the consequences are also very different. If we opt for the passive reading, the Arg0 is an Agent, whereas if we choose the inchoative reading the Arg0 is a Causer. The identification of multiwords, for instance the treatment of light verbs, is also especially problematic, basically when it is necessary to decide if a given structure corresponds to a verb and its complements or to an idiom (tener + ganas vs. tener_ganas, 'to need' or 'to want'). Next we present the 13 semantic classes that have been used for the characterization of verbal predicates: Automatic Annotation AnCora-Verb lexicons were used for the semiautomatic tagging of the AnCora corpora with arguments, thematic roles and semantic classes. A set of manually written rules automatically mapped part of the information declared in these lexicons onto the syntactic structure (). We defined three different types of rules taking into account the kind of information they were based on: a) Rules based on a specific function or morphosyntactic property. For example, if the predicate has associated the verbal morpheme 'PASS' (passive voice), then its subject has the argument position Arg1 and the thematic role patient (SUJ-Arg1-PAT). b) Rules based on the semantic properties of the predicates. For instance, when predicates are monosemic, the mapping between syntactic function and argument and thematic role as well as the assignment of the semantic class is directly realized. In the case of polysemic verbs, the mapping can be partial because it is only automatically assigned the unambiguous information. c) Rules based on the type of adverb or prepositional multiword appearing in a specific constituent. 
For instance, if the prepositional multiword a_causa_de ('because_of') or the adverb an ('still', 'yet') in Spanish, appears in an adverbial complement (function = CC), then it is automatically assigned the argument and thematic role ArgM-CAU (an adjunct argument with the thematic role cause) as well as ArgM-TMP (an adjunct argument with the thematic role temporal) respectively. We applied these rules following a decreasing heuristic according to the degree of generality, that is, we applied first the more general rules of type a), secondly the type c) rules and, finally, the type b) rules. In the automatic annotation process we obtained either full annotationscontaining information about the arguments and the thematic roles-or partial annotations with only arguments or thematic roles. This procedure permits to automatically annotate 60% 4 of the expected arguments and thematic roles with a fairly low error (below 2%) (). Given the high quality of the results obtained we claim that this methodology is very suited for the semiautomatic approach to corpus annotation and able to save a significant amount of manual effort. Afterwards we manually completed the thematic role annotation in order to guarantee the accuracy required to support the final resource. The Catalan corpus, AnCora-Ca, is already completed for the 500,000 words, while the semantic manual checking covers, up to now, the 100,000 words of the Spanish corpus, AnCora-Es. The Spanish corpus will be completed at the end of this year. Quantitative Analysis of Data The Spanish lexicon, AnCora-Verb-Es, contains a total of 1965 different verbs (corresponding to 3671 senses) and the Catalan lexicon, AnCora-Verb-Ca, contains 2151 verbs (corresponding to 4513 senses). In table 1, the distribution of these verbs' senses in semantic classes it is shown for both languages. The average of senses per lemmata is 1,86 for Spanish and 2,09 for Catalan. 
Table 1 shows that the semantic class with the highest number of different verbs, in both languages, is by far the transitive-agentive class (A2) followed by the unaccusative-state class (B2) and the causative-transitive class (A1). It has to be noticed that in B2 class the passive or inchoative constructions coming from other classes (A1, A2 and A3) as result of a diatheses alternation are also included. For instance, the passive alternation of the verbal predicate verificar 'to verify' (from A2 semantic class) is annotated as B2 (See figure 2). The expression of most alternations entails an aspectual change, which necessarily implies a change of semantic class. "() que es verifiqui l'honradesa dels crrecs publics" 6 Figure 2: Lexical entry of verificar 'to verify' in AnCora-Verb-Ca Next we present the figures corresponding to the projection of AnCora-Verb-Es and AnCora-Verb-Ca lexicons in AnCora-Es and AnCora-Ca corpora respectively (See table 2 and table 3). For the quantitative analysis of the data we have taken into account the 500,000 words fully annotated for Catalan and a subset of 100,000 words for Spanish. These figures correspond to the total amount of semantic annotated data manually checked. The Spanish subset comprises a total amount of 11,061 verbal tokens, corresponding to 2613 senses ( Verbs belonging to A32, C1, B1, A1 and D1 semantic classes represent a little bit more than the 25% of the total verbal predicates appeared in the corpora, the 26.24% for Spanish and 27.88% for Catalan. Whereas the rest of verbal classes -A31, C4, C3, D2 and D3represent the 3.97% and the 3.58% of the total verbal occurrences in AnCora-Es and AnCora-Ca corpora respectively. 
In order to get more information about how verbal predicates are distributed in each semantic class, we have obtained the frequency of the 10 more frequent lemmata for each class and its corresponding percentage with respect to the total amount of the class (See Table 4 for Spanish and Table 5 for Catalan). Notice that despite the difference in corpus size, the percentages overlap to a great extent. However, this overlapping does not take place in all verbs. Table 4 and 5 show that, for example, in the attributivestate class (C2) and the beneficiary-state class (C4), the 10 more frequent lemmata represent the 83,9% and the 75.89% of the total verbal tokens of these classes for Spanish, and for Catalan the 88,94% (C2) and the 92.8% (C4). The same subset in the scalar-state class (C3) in Catalan covers also the 92.8% of the total class tokens. Therefore, the state classes have few verbal types but they present a very high occurrence in both corpora. In fact, the verb ser ('to be') is the one with the highest frequency in both languages. On the opposite side we find the unaccusative-state class (B2), in which the 10 more frequent lemmata only represent the 8.8% for Spanish and the 14.34% for Catalan. It is important to highlight that nine of the thirteen Catalan semantic classes -A31, A32, C1, C2, C3, C4, D1, D2 and D3-cover, with the 10 more frequent lemmata, more than the 50% of the total amount of verbal occurrences in each class. In the case of the Spanish subset, the number of classes is eight -A31, A32, C1, C2, C3, C4, D2 and D3-. Only four classes in Catalan -A1, A2, B1 and B2-and five in Spanish -A1, A2, B1, B2 and D1-are below 50%, probably because they are also the classes with more different verbal lemmata, and more sparsely distributed too. Conclusions and Further Work We have presented the lexicons AnCora-Verb-Ca and AnCora-Verb-Es, focusing on the content of the entries and the quantitative analysis of the data projected in AnCora corpora. 
The lexicons and the annotated corpora constitute the richest linguistic resources of this kind freely available for Spanish and Catalan. The large amount of linguistic information contained in both resources should be of great interest for computational applications and linguistic studies. As future lines of research, we can consider the linking of the AnCora lexicons with other lexical resources, such as VerbNet, FrameNet and WordNet. These lexical resources codify different types of linguistic knowledge, and the creation of a common base that links all of them together will allow them to benefit from one another. Currently, a consulting interface for these lexicons is available at (http://clic.ub.edu/ancora/). (Footnote 9: We have not considered the D2 and D3 semantic classes because they have fewer than 6 different lemmata per class.)
Self-consistent scattering theory for the radiative transport equation. We study light propagation in a random medium governed by the radiative transport equation. We present a theory for the transport equation with an inhomogeneous absorption coefficient. We obtain an analytical expression for the specific intensity in a uniform absorbing and scattering medium containing a point absorber. Using that result we derive a self-consistent system of integral equations to study a collection of point absorbers. We show numerical results that demonstrate the use of this theory. |
In vitro fusion of Acanthamoeba phagolysosomes. III. Evidence that cyclic nucleotides and vacuole subpopulations respectively control the rate and the extent of vacuole fusion in Acanthamoeba homogenates. Fusion of phagolysosomes has been previously demonstrated to occur during the incubation of phagolysosome-containing homogenates of Acanthamoeba (Oates and Touster, 1978, J. Cell Biol. 79:217-234). Further studies on this system have shown that methylxanthines (0.2 mM) and/or cAMP (0.5-1 mM) markedly accelerate the average rate, but not the extent, of the in vitro phagolysosome fusion process. Adenosine, 5'-AMP, and ADP (0.5-1 mM) were without effect. ATP (0.5-1 mM) caused variable stimulation, whereas beta, gamma-methylene-ATP (1 mM) caused pronounced inhibition, as did GTP (1 mM) and cGMP (1 mM). Stimulation by 3-isobutyl-1-methylxanthine was blocked by GTP, but not by ATP or cAMP. These results indicate that the rate of phagolysosome fusion in Acanthamoeba homogenates may be regulated by cyclic nucleotides, with enhancement of the fusion rate by cAMP and inhibition of the rate by cGMP. The extent of the reaction increased spontaneously and markedly during the first few hours after preparation of the homogenates. This activation appears to be because of a slow conversion of a significant fraction of the vacuole population from a fusion-incompetent to a fusion-competent, cyclic nucleotide-sensitive state. the rate of the fusion reaction is dependent, namely adenyl and guanyl nucleotides. Evidence is also presented that the extent of the fusion reaction is dependent on the relative size of a fusion-competent subpopulation. An abstract of part of this work has been previously published. Assay Methods In most experiments with homogenates prepared from exponential phase cells, fusion was monitored by measuring the postincubation levels of hybrid (red blood cell and yeast-containing) vacuoles (Hv), as described previously. 
In all experiments with homogenates prepared from stationary phase cells, and in some with exponential phase cell homogenates, the fusion reaction was studied in homogenates containing vacuoles labeled with red blood cells only. In these cases, the extent of fusion (%F) was defined as the number of fusion events which had occurred (equal to the decrease in the total number of vacuoles) relative to the maximum number of fusion events which could possibly have occurred (equal to the initial vacuole concentration, Vo, when Vo >> 1), x 100. Under conditions where the particlecontaining vacuoles are stable, %F is given by: where No is the average number of particles per vacuole before the 30°C incubation, and N is the same quantity after the 'Abbreviations used in this paper:,,y-CH2-ATP, /3, ymethylene-adenosine 5'-triphosphate ; Hv, hybrid, i.e., red blood cell-and yeast-containing vacuole; MIX, 3isobutyl-l-methylxanthine ; Rv, red blood cell-containing vacuole; Yv, yeast-containing vacuole. incubation. These values were obtained by examining pre-and postincubation samples under oil immersion bright field microscopy in the presence of 0.3% Eosin Y, as previously described ; 600-800 vacuoles (800-1,500 particles) were counted per sample. The total number of particles in the vacuoles counted was divided by the total number of vacuoles counted to give N or No, from which %F was calculated by Eq. l. Vacuole stability was monitored by the dye exclusion assay previously described. In some experiments, the number of particles in various vacuole subgroups was determined at different time points during the fusion reaction. Results were expressed relative to the total number of membrane-bounded particles, a quantity which remains constant during the reaction (see Results). Vacuoles were classified under oil immersion as monomers, dimers, trimer,... etc., on the basis of the number of particles they contained (1, 2, 3,... etc.). 
The frequency of particles in each vacuole class was obtained by dividing the total number of particles found in a given class by the total number of particles counted in all classes (--1,600 particles per sample). Electron microscopy was performed as described before, except that to minimize handling of the rather fragile giant hybrid vacuoles (see Results), fixation of the undiluted reaction mixtures was done in these cases by carefully overlaying the relatively viscous homogenates with cold, unbuffered 1% OsO, which was 0.3 M in sucrose. The samples were allowed to fix for 4 h at 0°C before the processing plugs were inserted and the specimens were then embedded, sectioned, and stained as previously reported. Rate of the Fusion Reaction In homogenates prepared from stationary phase cells (1.8-2.5 x 106/ml), the rate of the in vitro fusion reaction slowed after the first 5 min and reached a plateau by 30 min (Fig. 1, open symbols). If MIX, a potent and specific inhibitor of cyclic nucleotide phosphodiesterase activity, was added to the homogenates before the incubation, the fusion reaction did not slow down after 5 min, but continued at the same rate and reached the same plateau value as reached by the control system in only -7 min (Fig. 1, half-filled diamonds). Theophylline (1,3-dimethylxanthine) was observed to cause the same effect, but it was somewhat less potent than MIX, as has been reported in other systems (data not shown). A result virtually identical to that found with 0.2 mM MIX was obtained in the absence of the phosphodiesterase inhibitor when the homogenates were supplemented with relatively high (nominally 0.5 or 1 mM) concentrations of CAMP ( Fig. 1, half-filled circles and triangles). system) was ruled out by examination of 10-min samples from the above reaction mixtures by electron microscopy. 
No evidence of adhesion in any of the samples was found by EM, and a significantly higher amount of fusion, i.e., a higher average number of particle profiles/vacuole, was found in the samples treated with cAMP (data not shown). As measured by dye exclusion, the integrity of the phagolysosome membranes was unaffected throughout the course of the incubation in the homogenates prepared from stationary phase cells, as previously reported for the reaction in homogenates made from exponential-phase cells. In addition, at the concentrations employed in this study, none of the compounds used to perturb the reaction system had any noticeable effect on phagolysosome stability, as measured by dye exclusion (data not shown). Stimulation of the above reaction rate, i.e., shortening of the time necessary to reach the reaction plateau, by MIX and/or cAMP was also seen in homogenates prepared from exponentially growing cells (4.0-5.0 x 105/ml). In this case, the control reached a plateau level by 10-15 min as previously reported, while the CAMP-supplemented reaction plateaued at the same level as the THE JOURNAL OF CELL BIOLOGY " VOLUME 85, 1980 control by 4-7 min (data not shown). The specificity of this rate-stimulating effect was then examined. As shown in Table 1, MIX and cAMP accelerated the fusion reaction to the same extent, causing an average increase of 70% at 10 min relative to the control, while adenosine, 5'-AMP, and ADP had no significant effect at 1 mM concentration (Table I) or at 0.5 mM (data not shown). The effect of ATP was variable (note the relatively large standard deviation for this value in Table I) : it either showed little effect or it stimulated as well as cAMP or MIX. In contrast, /3,y-CH 2-ATP inhibited the reaction -50% in each of three experiments (Table 1). In addition, GTP and cGMP were found to markedly inhibit the reaction (Table I). 
Finally, combination of MIX with cAMP or ATP did not cause significant additional fusion over that seen with MIX alone, but GTP was observed to block the stimulatory effect of MIX (Table I). These results indicate that the rate, but not the extent, of the in vitro fusion reaction depends on the endogenous concentrations of adenyl and guanyl nucleotides available. Aliquots from a mixture of Rv-containing and Yv-containing homogenates (prepared from exponential phase cells) were preincubated for the times indicated and then incubated for 10 min at 31°C. The number of hybrid vacuoles resulting from each incubation was measured by light microscopy, and plotted relative to the total number of yeast-containing (Yv + hybrid) vacuoles. Hv, number of hybrid vacuoles ; Tv, total number of yeastcontaining vacuoles. vacuole populations were maintained in separate test tubes on ice and mixed just before the incubation, indicating that the observed increase in the extent of fusion is not caused by facilitated vacuole contact (e.g., adhesion) occurring during the preincubation time. Activation was also seen when vacuoles from exponential phase cells were incubated for 10 min at 30°C, cooled on ice, and reincubated 1-2 h later for 10 min at 30°C (data not shown). Similar results were obtained in homogenates prepared from stationary phase cells. Extent of the Fusion Reaction At any given time, the in vitro system shows a fixed potential for the total number of vacuole fusion events that it will support; while the fusion rate may vary, the plateau level remains constant (e.g., Fig. 1). Analysis of the changes in vacuole species during the course of the reaction (Fig. 2) indicates that there are at least two vacuole subpopulations present: those that are competent to fuse, and those that are not. 
The size of the fusionincompetent subpopulation relative to the total vacuole population is substantial (e.g., 40% of the monomers, the predominant vacuole type initially present, are fusion-incompetent in Fig. 2). In addition, the final distribution of vacuole species is not altered by cAMP, only the rate at which the 808 THE JOURNAL OF CELL BIOLOGY " VOLUME 85, 1980 vacuole population reaches this distribution (see Results). These observations suggest that the fixed potential for fusion in this system at any given time is a function of the relative size of the fusioncompetent, cAMP-sensitive vacuole subpopulation. Exhaustion of the pool of fusion-competent monomers coincides with a sharp drop in the overall reaction rate, followed shortly thereafter by cessation of the reaction (Fig. 2; cf., e.g., Fig. 5 in reference 10). While depletion of fusion-competent vacuoles explains why fusion ceases in the monomer subpopulation, it does not explain why fusion ceases shortly thereafter in the other subpopulations. However, although further experiments will be necessary to definitively answer this question, it seems clear that as fusion between fusion-competent vacuoles proceeds, the ratio of nonreactive to reactive species will continuously increase, which will cause the opportunities for contact between fusion-competent vacuoles to decrease continuously. The extent of the reaction will therefore be self-limiting in a manner that is independent of the rate of fusion, as is observed (e.g., Fig. 1). The finding that the extent of the fusion reaction (plateau level) increased during the initial hours after preparation of the homogenates (e.g., Fig. 3) was unexpected. The results presented strongly suggest that the fraction of vacuoles that are fusion-competent increases during this time. The mechanism by which this occurs is not known. 
However, while the observed activation process could represent an artifact of the in vitro state (e.g., "recovery" of the vacuole membranes after the trauma of homogenization), the findings that the dye exclusion values remained unchanged during the activation process, and that the specificity of the fusion reaction was maintained, suggest that nonspecific modification of the vacuole membranes is not occurring. In addition, the observations of Ryter and Bowers provide evidence that, for a number of hours after phagocytosis, the phagocytic vacuoles of Acanthamoeba consist of at least two subpopulations in vivo: those that have fused with lysosomes and other phagocytic vacuoles, and those that have not. It will be noted that the vacuole subpopulations histochemically detected by Ryter and Bowers are of similar size to the subpopulations observed in this study, and that the relative size of the histochemical subpopulation that has fused with lysosomes increases slowly over a period of hours in vivo. It does not seem unreasonable, therefore, to suggest that the slow acquisition of fusion competency and cyclic nucleotide sensitivity by the phagocytic vacuoles observed here in vitro, may reflect a similar membrane processing step which occurs in vivo. Rate of the Fusion Reaction The methylxanthines are specific inhibitors of cyclic nucleotide phosphodiesterase activity in a wide variety of systems, and have been reported to cause the elevation of both cAMP and cGMP levels. The present observations that theophylline and MIX each cause stimulation of the average rate of in vitro vacuole fusion (e.g., Fig. 1) therefore suggest that the reaction rate is dependent on the concentration of cyclic nucleotide(s). The results that the stimulatory effect of MIX was reproduced by the addition of 1 mM cAMP ( Fig. 
1 and Table 1), but not by addition of 1 mM cGMP (Table I), indicate that the effect seen with the methylxanthines is due primarily to an elevation of the endogenous cAMP concentration. The relatively high (nominally 1 mM) concentration of exogenous cAMP required for full stimulation presumably reflects the high level of phosphodiesterase activity present in Acanthamoeba homogenates." The lack of effect of adenosine and 5'-AMP (Table I) indicates that the effect is not caused by a metabolite of cAMP. In addition, the strongly antagonistic effect of cGMP (Table I) suggests that besides regulating a wide variety of other cellular processes, cAMP and cGMP may also regulate the phagolysosomal membrane fusion process. Indirect observations on other types of systems (e.g., reference 4) have suggested that vesicle fusion in vivo is an ATP-requiring process. The present observations that exogenous ATP can maintain the initial fusion rate, that the effect is highly specific, and that it is apparently dependent on splitting of the terminal pyrophosphate linkage of ATP (Table I), indicate that the rate of vacuole fusion is linked to the ATP concentration in these homogenates. The reason for the lack of effect of exogenous ATP in some preparations is not known, but it presumably indicates that the endogenous ATP concentration was not always ratelimiting. It should be emphasized, however, that it is presently unclear whether the effect of ATP in this system is distinct from that of cAMP, as ATP may simply provide the substrate necessary S. B. Achar and R. A. Weisman, personal communication. for adenyl cyclase to generate cAMP. On the other hand, if CAMP exerts its action by activating a protein kinase, as is thought to occur in virtually all cases, then ATP would also probably be necessary as a phosphoryl donor at one or more subsequent phosphorylation steps. 
The stimulation of the fusion reaction by cAMP was particularly noteworthy in view of the fact that F-, which typically stimulates adenyl cyclase in broken cell preparations from mammalian systems, inhibits fusion. This apparent paradox was resolved by the recent observation by Achar and Weisman that adenyl cyclase of Acanthamoeba is inhibited by fluoride in concentrations similar to those found to inhibit the fusion reaction." The previously reported inhibition by fluoride therefore appears to be attributable to at least two actions: (a) inhibition of glycolysis, which would lower the endogenous ATP concentration, and (b) inhibition of adenyl cyclase, which would lower the endogenous cAMP concentration. Interestingly, cGMP and GTP were found in the present study to be more than three times as potent as fluoride in inhibiting the fusion reaction (50% inhibition by KF occurs at -3 mM, whereas 1 mM cGMP or GTP inhibits 75% ). The mechanism of inhibition by the guanyl nucleotides is presently unknown, as is the mechanism of stimulation by the adenyl nucleotides. Studies of the effects of these nucleotides in this system on membrane phosphorylation, endogenous calcium levels, and/or endogenous phospholipase A activity, are likely to shed further light on their role in the vacuole fusion process. |
Published: February 8, 2008 12:00 am Updated: Feb. 8, 2008 12:59 a.m.
Scott Mitchell, right, former Springville High, University of Utah and NFL quarterback, tries on a Springville High hat given to him by principal Rick Robins.
SPRINGVILLE — For the past few years, Scott Mitchell and his family have been able to watch from home the nightly fireworks at Disney World.
"Mickey Mouse is my neighbor," Mitchell said Thursday at a press conference introducing him as the new Springville High football coach.
Such a lifestyle has many wondering why the former Red Devils, University of Utah and NFL quarterback is leaving the 80-degree winters and his lucrative real-estate career for a less-than-minimum-wage coaching job and the snow-packed roads of Utah. He admits that it does kind of defy common sense. But the lure of working with young people and getting back into football again has him making the seemingly uncommon move.
"This is one of those things that just feels right," Mitchell said. "No matter where I've lived, this is where I've always been from. I want to be able to come back and give what I know."
It was that kind of sincerity that made principal Rick Robins decide Mitchell was the right man for the Red Devils' post.
"The one thing that I thought really stood out about Scott was his passion, love and commitment for Springville and the community and the kids here," Robins said. "I'm just thrilled to have him on our staff."
Some are expecting Mitchell to be the one who rescues the Red Devils from two straight disappointing seasons in which they won a combined five games. But when Mitchell meets with parents and players tonight, he's going to remind them that his past success and fame as a college and NFL quarterback don't guarantee the Red Devils any additional wins in the future.
Those kind of results will only come with everyone involved in the program having the same level of commitment, the same vision, same dedication and being part of the same journey. Mitchell will tell players and parents that that journey begins today.
"One of my greatest assets is bringing people together," he said.
Still, Mitchell's football experience and contacts certainly can't hurt the Red Devils on their road to recovery. However, Mitchell didn't reveal much Thursday about the style of football that he'll bring to his alma mater. Mainly, because he doesn't yet know. Rather than come in with a determined offensive and defensive system in mind, he wants to see the personnel with which he has to work.
"I don't want to feed them steak if they're only ready for vegetables and fruit," he said.
But he does plan to pass on some of the principles of success that he's learned over his professional career. Mainly, that success doesn't happen by accident. He wants his players to work hard, practice hard, learn to focus and learn to prepare.
"I won't expect them all to go on to play college football or in the NFL, but I will expect them to have some kind of success in their lives," he said.
He also has a philosophy of how football should be played.
"I'll expect my players to run hard to the football, expect them to start strong and expect them to finish," Mitchell said.
The first big challenge facing the new coach is assembling a staff. Springville will hire a new advanced conditioning teacher who will also likely be one of Mitchell's assistants. He plans to hire others who have the time, energy and knowledge, but also some who just know the ins and outs of high school football.
"I want people who share my vision and who are committed to what we are doing," he said.
Another key to improving Springville's football program, Mitchell said, is improving and coordinating the community's youth football leagues. Those programs need to teach kids proper football fundamentals. Mitchell also said Springville football will be about more than just winning.
"I believe you win the right way and that you lose the right way," he said. "You always need to have respect for your opponents."
Mitchell led the Red Devils to a state title in 1985. He then went on to throw for nearly 9,000 yards and 69 touchdowns in three seasons at Utah. In 12 NFL seasons playing for Miami, Detroit, Baltimore and Cincinnati he threw for more than 15,000 yards and 95 touchdowns. His best year was with Detroit in 1995, when he threw for more than 4,000 yards and 32 TDs.
Mitchell and his family are expected to make the permanent move from Florida to Utah within the next two months. |
Multi-site test optimization for multi-Vdd SoCs using space- and time- division multiplexing Even though system-on-chip (SoC) testing at multiple voltage settings significantly increases test complexity, the use of a different shift frequency at each voltage setting offers parallelism that can be exploited by time-division multiplexing (TDM) to reduce test length. We show that TDM is especially effective for small-bitwidth and heavily loaded test-access mechanisms (TAMs), thereby tangibly increasing the effectiveness of multi-site testing. However, TDM suffers from some inherent limitations that do not allow the fullest possible exploitation of TAM bandwidth. To overcome these limitations, we propose space-division multiplexing (SDM), which complements TDM and offers higher multi-site test efficiency. We implement space- and time-division multiplexing (STDM) using a new, scalable test-time minimization method based on a combination of bin packing and simulated annealing. Results for industrial SoCs highlight the advantages of the proposed optimization method.
def generate_event(self, values):
    """Build a graph ``Event`` proto from the given ``values`` mapping.

    Args:
        values: Mapping that may carry a ``'graph'`` entry (the serialized
            graph definition); ``values.get('graph')`` may be ``None``.

    Returns:
        A ``summary_pb2.Event`` whose ``wall_time`` is the current time and
        whose ``graph_def`` comes from ``values``, produced by round-tripping
        a dict through JSON into the proto.
    """
    payload = json.dumps(
        {
            'wall_time': time.time(),
            'graph_def': values.get('graph'),
        }
    )
    # json_format.Parse maps the JSON keys onto the Event proto's fields.
    return json_format.Parse(payload, summary_pb2.Event())
The increasing use of cannulation of the internal jugular vein led to an increase of complications associated with this procedure. In most cases the reason is incorrect puncture. Functional sonographic studies of the internal jugular vein have demonstrated the anatomical conditions for safe cannulation. The optimal site of puncture was detected with a sector scan by measuring volumes in both internal jugular veins in 12 patients dependent on the position and rotation of the head. As a result of our study we propose a modification in the cannulation technique with a head-down position and 45 degrees rotation of the head to the opposite side without palpating the carotid artery.
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { EuiPanel, EuiSelect } from '@elastic/eui';
import { noop } from 'lodash/fp';
import React, { memo } from 'react';
import { HeaderSection } from '../../../../components/header_section';
import { HistogramSignals } from '../../../../components/page/detection_engine/histogram_signals';
// Placeholder "Stack by" options shown in the signals chart header.
// NOTE(review): sample data — each `value` presumably names the field the
// histogram would aggregate on; confirm against the chart implementation.
export const sampleChartOptions = [
  { text: 'Risk scores', value: 'risk_scores' },
  { text: 'Severities', value: 'severities' },
  { text: 'Top destination IPs', value: 'destination_ips' },
  { text: 'Top event actions', value: 'event_actions' },
  { text: 'Top event categories', value: 'event_categories' },
  { text: 'Top host names', value: 'host_names' },
  { text: 'Top rule types', value: 'rule_types' },
  { text: 'Top rules', value: 'rules' },
  { text: 'Top source IPs', value: 'source_ips' },
  { text: 'Top users', value: 'users' },
];
/**
 * "Signal detection frequency" panel: a header with a "Stack by" selector
 * and the signals histogram beneath it.
 *
 * The selector is currently inert: it always displays the first sample
 * option and its change handler does nothing.
 */
const SignalsChartsComponent = () => (
  <EuiPanel>
    <HeaderSection title="Signal detection frequency">
      <EuiSelect
        options={sampleChartOptions}
        // Pass the no-op directly: the previous `() => noop` allocated a new
        // function on every render that only *returned* noop, never ran it.
        onChange={noop}
        prepend="Stack by"
        value={sampleChartOptions[0].value}
      />
    </HeaderSection>
    <HistogramSignals />
  </EuiPanel>
);

export const SignalsCharts = memo(SignalsChartsComponent);
|
Scope for accessing the chain length dependence of the termination rate coefficient for disparate length radicals in acrylate free radical polymerization A method that utilizes reversible addition fragmentation chain transfer (RAFT) chemistry is evaluated on a theoretical basis to deduce the termination rate coefficient for disparate length radicals ks;l t in acrylate free radical polymerization, where s and l represent the arbitrary yet disparate chain lengths from either a 'short' or 'long' RAFT distribution. The method is based on a previously developed method for elucidation of ks;l t for the model monomer system styrene. The method was expanded to account for intramolecular chain transfer (i.e., the formation of mid-chain radicals via backbiting) and the free radical polymerization kinetic parameters of methyl acrylate. Simulations show that the method's predictive capability is sensitive to the polymerization rate's dependence on monomer concentration, i.e., the virtual monomer reaction order, which varies with the termination rate coefficient's value and chain length dependence. However, attaining the virtual monomer reaction order is a facile process and once known the method developed here that accounts for mid-chain radicals and virtual monomer reaction orders other than one seems robust enough to elucidate the chain length dependence of ks;l t for the more complex acrylate free radical polymerization. © 2007 WILEY-VCH Verlag GmbH & Co. KGaA, Weinheim. |
Aware he was starting only 20 minutes or so before kick-off, Ulster's Will Addison didn't quite have the time to get his cheering section in place for his first Test game in Dublin on Saturday.
Ireland were already halfway through their warm-up when Robbie Henshaw tweaked his hamstring, leaving for the former Sale man in Joe Schmidt's midfield.
Even though they couldn't be there in person, it was an emotional day for his family, who had been on hand to witness his debut against Italy in Chicago a week prior, especially his Fermanagh-born mother.
"I was pretty much just in tears to my mum just there," he revealed after the game. "They sadly couldn't come over this time with it being a spur of the moment selection but I'm lucky that I had my girlfriend here, which was great.
"I have been on the phone to the parents and the family and I was full of emotion. That surprise selection luckily kind of took out that emotion and let me concentrate on the task. Certainly, after the game, it filled me with absolute pride.
"It was a bit of a whirlwind so I wouldn't remember the exact minute but it was probably about 20-minutes before (kickoff). I went through the second half of the warm-up phase which was really good in terms of helping me bed into the game.
"The preparation that you do during the week really prepares everyone in the squad whether you involved or not; everyone knows their role extremely well.
The attention to detail and pressure of having done your homework is a famed aspect of Ireland camps under Joe Schmidt, with Addison believing such preparation was a massive help when handed his most unexpected of starts.
"It's certainly an eye opener but I don't think I would have coped today without that amount of pressure that we have week in week out. The pressure is really on us to know our role and understand what our role requires.
"When I get thrown in at the deep end that's what I fall back to. I know that I have coped in training which is a very high intensity and that gives me the confidence that I can cope at international level too."
"The last few months has helped, having a few days in camp in Australia (during the summer) gave me a taste for the environment and then I had a weekend in August.
"Then the last week has been great especially being away in Chicago with the group has really pushed me and made sure that I am really aware of what's required. I have really enjoyed the last few weeks."
This was hardly a vintage performance from Schmidt's Ireland, a team held to a decidedly higher standard than any of their predecessors.
Argentina targeted their line-out, both in their selection and during the game, while in attack ten turnovers were in contrast to only a solitary line-break.
There were still three tries -Kieran Marmion, Bundee Aki and Luke McGrath all crossed the whitewash - and rust was a passable excuse given that Ireland's front-line selection haven't taken the field together since the third Test against Australia back in June.
Addison was one who did impress, so too Aki, James Ryan and the host's scrum.
In what was only his second Test, and two months after his Ulster debut, the 26-year-old thinks there is still more he can offer.
"I wasn't totally happy with my performance at times and I wouldn't look at excuses like that (not having run much in the 13 role)," he said.
"The challenge I have been set is to know a few roles in the team and I have really enjoyed that but there is no stone left unturned during training for me to be fully prepared for the weekend.
"I have probably got to look at myself. I really enjoy 13 and it's probably where I prefer playing but I have really enjoyed my rugby at 15 for Ulster.
"I have got to be adept at both positions and I feel that I have taken a step today but I have got a lot to improve on.
"I think there is a little bit defensively that I am not happy with. I love defending it is one of my favourite parts of the game.
"Set-piece wise you come up against a new challenge in international rugby. It is probably one of those things that has taken me back, the challenge at set piece and during phase play, that when you take someone defensively it is something I really need to work closely with Faz (defence coach, Andy Farrell) on because he is thereabouts the best defensive coach in the world. I am very fortunate to have that resource available to me."
He also reserved special mention for his provincial defence coach Jared Payne. While it was another Irish 13 who was Addison's hero growing up, his more immediate predecessor is perhaps a more apt comparison. Payne was a famed defensive organiser in the midfield channel under Schmidt as well as having the versatility to play full-back at a high level.
"I keep reiterating that I've got Jared Payne as my mentor at my club who was renowned for his defensive abilities," he added.
"I've got him mentoring me week in week out. Coming into camp it makes it that bit easier that I have got those systems in my mind. He worked closely with Faz during the summer so I kind of had a head start thanks to working with JP."
Up next, of course, are the back-to-back World Champions, the only side ranked ahead of Ireland - The All Blacks are in Dublin on Saturday.
"The best team in the world are coming to town," said Addison. "Everybody will be putting up their hand in training and that's going to be pretty tasty."
By Michael Sadlier Jacob Stockdale is facing a race against time to be fit for Ulster's crunch PRO14 play-off with Irish rivals Connacht.
By Ruaidhri O'Connor For the third successive season, Munster have fought tooth and nail to get themselves to a Champions Cup semi-final. |
// Copyright 2017 <NAME>
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef LEAF_DATA_HEADER
#define LEAF_DATA_HEADER
#include <memory>
#include <string>
#include <vector>

#include "dataBuilder.hpp"
#include "storableWrapper.hpp"
namespace RecHDF
{
// A leaf node holding a small vector of doubles; participates in the
// storableWrapperBase-style data/children traversal used for HDF output.
class leafData
// enable_shared_from_this lets children() hand out a shared_ptr that shares
// ownership with the shared_ptr already managing this instance
    : public std::enable_shared_from_this<leafData>
{
public:
    // Constructs a leaf whose payload is five copies of `val`.
    leafData(double val, std::string name = "leafData")
        : name(std::move(name)),
          data_(5, val)
    {
    }

    // Virtual destructor: this class exposes virtual member functions and is
    // passed around polymorphically, so deleting a derived object through a
    // leafData pointer must be well-defined.
    virtual ~leafData() = default;

    // -------------------------------------------- //
    // --- provide storableWrapperBase interface --- //
    // -------------------------------------------- //

    // Gathers this node's data vectors for output, keyed as "data".
    virtual std::vector< named_double_vect_t > data() const
    {
        dataVectorBuilder dataV("data", data_);
        return dataV.get();
    }

    // Returns all children recursively; a leaf has none, so the result wraps
    // only this node itself.
    virtual std::vector< named_instances_t > children() const
    {
        instanceVectorBuilder instances(name, std::make_shared<storableWrapper >(shared_from_this()));
        return instances.get();
    }

protected:
    // identifier used as the key when this node is listed in children()
    std::string name;
    // payload values
    std::vector<double> data_;
};
}
#endif //LEAF_DATA_HEADER |
Economic growth, an evolutionary process that gives rise to an attractor Economic growth is seen here as the outcome of an entrepreneur-driven process of evolution in the context of an economy of competitive markets. In the course of this process the entrepreneurs implement capital and labour factors, one part of them committed to substitution and the other to complementarity with increasing returns. The theory demonstrates that the conditions of equilibrium of the different markets give rise to an attractor made up of steady states. The growth determinants for these states are employment, investment and technical productivity, with the profit share in income always being equal to 1/3. The comparison of what is learned with the empirical reality of the main developed economies demonstrates the interest of this view of growth. The attractors of the United States economy for the period 19602000 are given special attention. |
// Barrel file: re-exports every icon component plus the `withIcon` helper so
// consumers can import from this directory's root instead of individual files.
export * from "./IconArrowDown"
export * from "./IconArrowRight"
export * from "./IconChecked"
export * from "./IconChevonLeft"
export * from "./IconChevonLeftLight"
export * from "./IconCircleClose"
export * from "./IconClear"
export * from "./IconDiscord"
export * from "./IconExternal"
export * from "./IconFacebook"
export * from "./IconIndicator"
export * from "./IconInfo"
export * from "./IconInstagram"
export * from "./IconLogbook"
export * from "./IconLogo"
export * from "./IconLogoText"
export * from "./IconMatters"
export * from "./IconMetaMask"
export * from "./IconMinus"
export * from "./IconOpenSea"
export * from "./IconPlus"
export * from "./IconScrollDown"
export * from "./IconSearch"
export * from "./IconSend"
export * from "./IconSpinner"
export * from "./IconTelegram"
export * from "./IconTimelineFinished"
export * from "./IconTimelineOngoing"
export * from "./IconTimelineReady"
export * from "./IconTwitter"
export * from "./IconUser"
export * from "./IconUserAnon"
export * from "./IconUserChecked"
export * from "./IconWallet"
export * from "./IconWalletConnect"
export * from "./IconWorld"
export * from "./withIcon"
|
#include <stdio.h>

/*
 * Cheapest way to buy exactly n items when one item costs `single_price`
 * and a bundle of two costs `pair_price`.
 *
 * Two strategies are compared:
 *   - n/2 pairs, plus one single item when n is odd;
 *   - n single items.
 * Mixing beyond this never helps: whichever per-unit price is lower
 * dominates, so the cheaper of the two candidates is optimal.
 */
static long long min_cost(long long n, long long single_price, long long pair_price)
{
    long long with_pairs = (n / 2) * pair_price + (n % 2) * single_price;
    long long all_singles = n * single_price;
    return with_pairs < all_singles ? with_pairs : all_singles;
}

int main(void)
{
    long long t;
    /* bail out on malformed input instead of looping on garbage */
    if (scanf("%lld", &t) != 1)
        return 0;
    while (t--) {
        long long n, single_price, pair_price;
        if (scanf("%lld%lld%lld", &n, &single_price, &pair_price) != 3)
            return 0;
        printf("%lld\n", min_cost(n, single_price, pair_price));
    }
    return 0;
}
|
<reponame>milance993/bb-fuel<gh_stars>0
package com.backbase.ct.bbfuel.configurator;
import static org.apache.http.HttpStatus.SC_CREATED;
import com.backbase.ct.bbfuel.client.accessgroup.UserContextPresentationRestClient;
import com.backbase.ct.bbfuel.client.common.LoginRestClient;
import com.backbase.ct.bbfuel.client.notification.NotificationsPresentationRestClient;
import com.backbase.ct.bbfuel.data.CommonConstants;
import com.backbase.ct.bbfuel.data.NotificationsDataGenerator;
import com.backbase.ct.bbfuel.util.CommonHelpers;
import com.backbase.ct.bbfuel.util.GlobalProperties;
import com.backbase.dbs.notifications.rest.spec.v2.notifications.NotificationsPostRequestBody;
import java.util.stream.IntStream;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
@Slf4j
@Service
@RequiredArgsConstructor
public class NotificationsConfigurator {

    /**
     * Shared configuration source. Declared {@code static final}: the
     * previous non-final static field was needlessly mutable for what is a
     * process-wide singleton handle.
     */
    private static final GlobalProperties GLOBAL_PROPERTIES = GlobalProperties.getInstance();

    private final NotificationsPresentationRestClient notificationsPresentationRestClient;

    private final LoginRestClient loginRestClient;

    private final UserContextPresentationRestClient userContextPresentationRestClient;

    /**
     * Create global notifications. Requires also either ingest.approvals.for.notifications=true or disabled approval
     * flow on notifications service.
     *
     * <p>The number of notifications is drawn uniformly from the configured
     * [notifications.min, notifications.max] range, and each one is asserted
     * to be created with HTTP 201.
     *
     * @param externalUserId - external user ID of notifications manager with create and approve permissions
     */
    public void ingestNotifications(String externalUserId) {
        // bb-fuel convention: the external user id doubles as the password
        loginRestClient.login(externalUserId, externalUserId);
        userContextPresentationRestClient.selectContextBasedOnMasterServiceAgreement();

        int randomAmount = CommonHelpers
            .generateRandomNumberInRange(GLOBAL_PROPERTIES.getInt(CommonConstants.PROPERTY_NOTIFICATIONS_MIN),
                GLOBAL_PROPERTIES.getInt(CommonConstants.PROPERTY_NOTIFICATIONS_MAX));

        // Each notification is independent, so they are ingested in parallel.
        IntStream.range(0, randomAmount).parallel().forEach(randomNumber -> {
            NotificationsPostRequestBody notification = NotificationsDataGenerator
                .generateNotificationsPostRequestBodyForGlobalTargetGroup();

            notificationsPresentationRestClient.createNotification(notification)
                .then()
                .statusCode(SC_CREATED);

            log.info("Notification ingested with title [{}] and target group [{}]", notification.getTitle(),
                notification.getTargetGroup());
        });
    }
}
|
Genesis of Volcanic Rocks in the Zijinshan Ore District, SE China: Implications for Porphyry-Epithermal Mineralization Volcanic rocks, as the extrusive counterparts of the mineralized intrusions, can provide important information on the magma source, petrogenesis, and metallogenic conditions of the coeval porphyry-epithermal system. Shanghang Basin volcanic rocks are spatially and temporally related to a series of adjacent porphyry-epithermal Cu–Au deposits, and they can be used as a window to study the related deposits. Two laser-ablation inductively coupled plasma mass spectrometry zircon U–Pb analyses of the volcanic rocks yield weighted mean ages of ~105 Ma, identical to the age of the coeval porphyry-epithermal mineralization. Rocks have SiO2 contents of 55.4 to 74.8 wt % and belong to the high-K to shoshonitic series, characterized by strong differentiation of light rare-earth elements (REEs) relative to heavy REEs (mean LaN/YbN = 16.88); enrichment in light REEs, Rb, Th, and U; and depletion in Nb, Ta, Zr, Hf, and Ti. The volcanic rocks display (87Sr/86Sr)i values of 0.709341 to 0.711610, εNd(t) values of −6.9 to −3.3, εHf(t) values of −3.95 to −0.30, and δ18O values of 6.07‰ to 6.79‰, suggesting that the parental magmas were derived from a mantle source enriched by subduction-related processes. SiO2 content shows a strong negative correlation with the contents of some major and trace elements, indicating that fractional crystallization played an important role in the generation of these rocks. A binary mixing model of Hf–O isotopes gives an estimated degree of crustal contamination of <30%. In addition, magnetite crystallized early, and the samples showed high zircon EuN/EuN* values (0.48–0.68), indicating that the parental magma had a high oxygen fugacity. The inferred suppression of plagioclase crystallization and increasing hornblende crystallization during magma evolution suggest that the magma was water rich. 
The high-water content and high oxygen fugacity of the magma promoted the dissolving of sulfides containing Cu and Au in the source area and contributed to the migration of ore-forming elements. Introduction The Zijinshan ore district is located in southwestern Fujian Province, southeastern China ( Figure 1) and is one of the most important copper-, gold-, and molybdenum-producing areas in the country. Proven reserves of gold, silver, copper, and molybdenum exceed 400 tons, 6000 tons, 4.1 million tons, and 0.11 million tons, respectively. It is generally considered that the copper-gold-molybdenum mineralization was associated with large-scale magmatic and hydrothermal activities during the Late Mesozoic, with most of these deposits having formed during the Cretaceous. The Zijinshan ore district is widely regarded as a typical porphyry-epithermal metallogenic system. The district includes the Zijinshan high-sulfur epithermal copper-gold deposit, the Yueyang low-sulfur epithermal silver-gold-dominated polymetallic deposit, and the Luoboling porphyry copper-molybdenum deposit ( Figure 2). Previous studies have focused mainly on the mineralization process of the deposit and the structure of the ore field [7,, whereas little attention has been paid to the source, nature, and evolution of the parental magma from which these deposits were formed. Recent studies have suggested that the Luoboling porphyry copper-molybdenum deposit and the Zijinshan copper-gold deposit were produced by discrete magmatic-hydrothermal systems, but the nature of the ore-forming magma associated with the Zijinshan copper-gold deposit is still unclear. However, in the Shanghang Basin, which lies adjacent to the southwest of the Zijinshan ore district (Figure 2), the porphyry-epithermal Cu-Au deposits are spatially and temporally related to the volcanic rocks of the Shimaoshan Group. 
The volcanic rocks are considered as the extrusive magmatic counterpart of the mineralized rocks, and their study should therefore provide important new information on the magma source, petrogenesis, and metallogenic conditions of the porphyry-epithermal system at Zijinshan. Shimaoshan Group volcanic rocks are exposed in parts of the Zijinshan ore district but are distributed mainly in the Shanghang Basin ( Figure 2). Two periods of magmatic activity occurred in the vicinity of the Zijinshan ore district, at~165-150 Ma and~113-95 Ma [5,. The ages of formation of these deposits are well established, including the Zijinshan epithermal copper-gold deposit at 102.86 ± 0.61 Ma and the Luoboling porphyry copper-molybdenum deposit at 104.9 ± 1.6 Ma. Clearly, there is a close temporal and spatial relationship between the Shimaoshan Group volcanic rocks and deposits of the Zijinshan ore district. In this paper, we report new zircon U-Pb ages; trace elements, Hf-O isotopes, and whole-rock major; and trace elements and Sr-Nd isotopes of Shimaoshan Group volcanic rocks to better constrain the nature of the magma (including mantle sources) and mineralization conditions (including magma water content and oxygen fugacity) associated with the formation of the Zijinshan ore district. Geological Background and Sampling Southeastern China comprises the Cathaysia Block in the southeast and the Yangtze Block in the northwest. According to the different crustal evolutions on opposing sides of the Zhenghe-Dapu Fault, the Cathaysia Block can be further divided into the Western Cathaysia and Eastern Cathaysia regions. Magmatic activity in southeastern China during the Late Mesozoic was concentrated mainly in the Cathaysia Block, where it migrated seaward over time and increased in intensity. Volcanic-intrusive magmatism during the Yanshanian (65-220 Ma) produced mainly felsic rocks with some basalts and rare intermediate rocks. 
Compared with the Jurassic, there are fewer Cretaceous granitoid rocks but more abundant volcanic rocks. Jurassic volcanic rocks and granitoids are distributed mostly in Western Cathaysia, whereas Cretaceous basaltic rocks and granitoids appear mainly in Eastern Cathaysia (Figure 1a). Fujian Province, located in the eastern part of southeastern China, is divided into three tectonic belts by the Changle-Nanao and Zhenghe-Dapu faults. From east to west, these belts are the Pingtan-Dongshan Metamorphic Belt, the Yanshanian Magmatic Belt, and the Early Paleozoic Fold Belt (Figure 1b). The Cretaceous volcanic rocks in Fujian Province can be divided into upper series named the Shimaoshan Group and lower series named the Moshishan Group, with most of the volcanic rocks in the Shanghang Basin belonging to the Shimaoshan Group. The Shimaoshan Group comprises the Huangkeng Formation and the overlying Zhaixia Formation, with each formation being further divided into upper and lower members. The Huangkeng and Zhaixia formations of the Shimaoshan Group show substantial variation in thickness between areas of tectonic subsidence and areas of tectonic uplift. The thickness of the Huangkeng Formation in areas of tectonic subsidence reaches 700 m, whereas in areas of tectonic uplift, the rocks of this formation are scattered across the Ermiaogou, and Zijinshan ore districts with a thickness of only tens to a few hundreds of meters. Rocks of the lower member of the Huangkeng Formation comprise mostly conglomerate, glutenite, and siltstone, mixed with purple-gray dacite in the middle, and have a total thickness of about 230 m. The lower member unconformably overlies the Zijinshan Complex in the Yueyang mining area and the Louziba Group in the Ermiaogou mining area. The upper member of the Huangkeng Formation is composed of purple-gray andesite, andesitic breccia, light-gray dacitic crystal tuff, and dark-gray andesite basalt, with a total thickness of >350 m. 
The Zhaixia Formation is distributed mainly in the southwest part of the Zijinshan ore district. Its lower member is composed of thin layers of purple-red silty mudstone intercalated with purple-red rhyolitic sedimentary tuff and rhyolitic crystal tuff, with a total thickness of up to 372 m. The upper member consists mostly of gray-white rhyolite, purple-red volcanic breccia, and rhyolitic breccia tuff, with a total thickness of up to 1037 m. The ages of volcanic rocks of the Shimaoshan Group are well established. By combining multiple dating methods (such as zircon LA-ICP-MS, whole-rock Rb-Sr isochron, zircon SHRIMP), estimates of the formation age of the Huangkeng Formation volcanic rocks are clustered within 93-125 Ma and those of the Zhaixia Formation within 93-121 Ma. The formation age of Shimaoshan Group volcanic rocks was constrained to 104-94 Ma by using laser ablation inductively coupled plasma mass spectrometry (LA-ICP-MS) zircon U-Pb dating. A whole-rock Rb-Sr isochron age of rhyolite in the Zhaixia Formation of 94 ± 7.7 Ma and an age of dacite in the Huangkeng Formation of 125 ± 9.8 Ma were reported. Zircon U-Pb dating of purplish-red rhyolite and breccia from the Huangkeng Formation has yielded ages of 110.1 ± 0.7 Ma and 113.0 ± 1.9 Ma, respectively. Zircon U-Pb dating of dacite and tuff in the Zijinshan ore district has given ages of 110 ± 1 Ma and 111 ± 1 Ma, respectively. Zircon U-Pb ages were obtained for limestone and rhyolite in the Yueyang mining area of 105 ± 1 Ma and 102 ± 1 Ma, respectively. The above data together indicate that Cretaceous magmatism in the Zijinshan ore district lasted for about 20 Myr. Samples for this study were obtained from the Zijinshan ore district and the Shanghang Basin. The Shanghang Basin is controlled by a NW-trending fault with an area of 100 km 2. The southwestern and northeastern margins of this basin are controlled by deep NW-trending faults ( Figure 2). 
The Shimaoshan Group, mainly the upper member of the Huangkeng Formation, is exposed in the Shanghang Basin with a thickness of >350 m. Most of the samples for this study were collected from drill cores (ZK5710 and ZK5903), with a smaller number of samples being obtained from surface exposures. A total of 62 rock samples were collected, combining the characteristics of hand specimens and the characteristics of major elements; we divided them into three categories, namely, trachyte, trachyandesite, and rhyolite-dacite, respectively. Most of the volcanic rock samples have undergone variable degrees of alteration. We have selected a more representative sample for a brief introduction from each category. Rhyolite is pink with a patchy structure (Figure 3a), in which the phenocrysts are mainly alkaline feldspar and the matrix is cryptocrystalline (Figure 3b). Trachyte is dark gray with a patchy structure and contains phenocrysts of mainly feldspar and subordinate amphibole (Figure 3c,d). Trachyandesite is dark gray, with phenocrysts of mainly plagioclase and subordinate magnetite in a matrix of grains with a preferred orientation (Figure 3e,f). Zircon U-Pb Dating and Trace-Element Analyses Conventional density and magnetic methods were used to separate zircon grains from two samples (SH1604, trachyte and ZK5710-30, trachyte), following which grains were hand-picked under a high-resolution optical microscope. Representative zircon grains were set in epoxy resin and then polished to expose their cores. Cathodoluminescence (CL) imaging was used to examine zircon morphology and internal textures and to select analysis locations. Zircon U-Pb dating and trace-element analyses were carried out concurrently by LA-ICP-MS at the State Key Laboratory of Ore Deposit Geochemistry, Institute of Geochemistry, Chinese Academy of Sciences (SKLODG, IGCAS), Guiyang, China. 
A GeolasPro 193 nm ArF excimer laser (Coherent, Gilching, Germany) was used to ablate zircons, and an Agilent 7900 ICP-MS instrument (Agilent, Santa Clara, CA, USA) provided ion signal intensities. A mixture of helium and argon was used as the carrier gas to transport the aerosol to the ICP-MS instrument. Zircon 91500 was used as an external standard, and reference materials NIST 610, BIR-1G, BCR-2G, BHVO-2G, PL-1, and QINGHU were used to calibrate the trace-element data. Adopted values of these reference materials are from the GeoReM database. Each sample analysis involved 18 s background acquisition and 50 s data collection. Operating conditions included a laser beam diameter of 32 m, a repetition rate of 5 Hz, and an energy flux of 8 J/cm 2. ICPMSDataCal software (Version 11.8, Sample Solution, Wuhan, China) was used to process the U-Pb isotopic and trace-element data. Si was used as an internal standard, and multiple elements were used as external standards to calibrate analyzed element contents. Isoplot 4.15 software (Berkeley Geochronology Center, Berkeley, CA, USA) was used to construct concordia plots and calculate weighted mean ages. Whole-Rock Major-and Trace-Element and Sr-Nd Isotope Geochemical Features Rock samples were cut into small pieces and fresh parts were selected and ground into powder for whole-rock geochemical analyses. An Axios (PW4400) X-ray fluorescence spectrometer (Thermo Fisher, Waltham, MA, USA) was used to determine contents of major elements at the SKLODG, IGCAS. Loss-on-ignition (LOI) values were measured by heating 1 g sample powder to 1100 C for 1 h. Trace-element analyses were also conducted at the SKLODG, IGCAS, by using an ICP-MS instrument (PE DRC-e) produced by PerkinEmer company (Waltham, MA, USA). For each analysis, a 50 mg subsample of rock powder was dissolved at 190 C in a mixture of HF and HNO 3, and placed in a PTFE-lined stainless-steel bomb for 48 h. 
To monitor signal drift during counting, Rh was applied as an internal standard. International standards GBPG-1, AGV-2, and AMH-1 were used to monitor analytical quality, with analytical accuracy generally better than ±10%. Details of the experimental procedures followed are given by. A TRITON thermal-ionization mass spectrometer (Thermo Fisher, Waltham, MA, USA) was used to determine Sr-Nd isotope compositions of whole-rock samples at the SKLODG, IGCAS. Samples were dissolved by using the same method as that used for trace elements. Cationic ion-exchange procedures were applied to effectively separate Sr and Nd in solution. Sr and Nd isotopic ratios were normalized with respect to 86 Sr/ 88 Sr = 0.1194 and 146 Nd/ 144 Nd = 0.72419. Details of the experimental method and procedure are given by. Zircon O Isotope Analyses Determinations of zircon O isotopic composition were conducted by a Cameca IMS-1280HR secondary-ion mass spectrometer (Camera, Paris, France) at the SIMS laboratory in the Guangzhou Institute of Geochemistry, Chinese Academy of Sciences, Guangzhou, China. Analytical procedures followed those described by. Under conditions of 10 kV and an intensity of~2 nA, the Cs + primary ion beam was accelerated and then rasterized over a 10 m area. Analysis spots had a diameter of 20 m. The width of the inlet gap of the multicollector Faraday cup (FC) of 16 O and 18 O was 120 m, and the width of the outlet gap was 500 m. The intensity of 16 O was~1 10 9 counts per second. In multicollector mode, two off-axis Faraday cups were used to measure O isotopes. Each analysis duration included 120 s for pre-sputtering, 60 s for automatic beam formation, and 40 s for O isotope integration. Vienna Standard Mean Ocean Water (VSMOW) composition was used as the standard to normalize 18 O values. Standard zircon 91500 was used to correct the instrumental mass fractionation factor, with 18 O VSMOW = 9.9%. 
Zircon Lu-Hf Isotope Analyses In situ analyses of Hf isotopic compositions of zircon were performed using LA-MC-ICP-MS at the SKLODG, IGCAS. Analyses of Hf were conducted on the same spots as used for O isotope analyses. A RESOlution S-155 LA system (Australian Scientific Instruments, Canberra, Australia) and a Nu Plasma III MC-ICP-MS instrument (Nu Instruments, Wrexham, Wales, UK) were used together for the analyses. After homogenization of the beam-delivery system, the 193 nm ArF excimer laser was focused on the surface of the zircon with an energy density of 6.0 J/cm 2. Laser ablation involved a spot diameter of 40 m, a repetition frequency of 6 Hz, and an ablation duration of 40 s. Helium was used as a carrier gas to transport the aerosol to the Nu Plasma III MC-ICP-MS instrument (Nu Instruments, Wrexham, Wales, UK). For quality control, five standard zircons (GJ-1, 91500, Pleovice, Mud Tank, and Penglai) were processed for every 30 unknown analyses, and one standard zircon (Penglai) was processed for every five unknown analyses. The influence of mass deviation on Hf was corrected according to an exponential law by using the value of 179 Hf/ 177 Hf = 0.7325. The isobaric interference of 176 Lu on 176 Hf was corrected by using the recommended 176 Lu/ 175 Lu ratio of 0.02655 to calculate values of 176 Lu/ 177 Hf, and the isobaric interference of 176 Yb on 176 Hf was corrected by using the recommended 176 Yb/ 172 Yb ratio of 0.5887 to calculate values of 176 Yb/ 177 Hf. Zircon U-Pb Dating and Trace Elements The U-Pb isotope data are listed in Supplementary Table S1, and representative zircon CL images and U-Pb concordia diagrams are shown in Figures 4 Whole-Rock Major and Trace Elements Sixty-two volcanic rock samples from the Shanghang Basin were analyzed for major-and trace-element compositions, with results given in Supplementary Table S2. Sample contents were standardized to 100% after accounting for LOI. These samples have SiO 2 contents ranging from 55. 
represents primitive arc melt. The dotted line represents the field of experimental crust melts, data from. the solid line represents the field of metabasaltic and eclogite experimental melts at 1-4 GPa, data from. Whole-Rock Sr-Nd Isotopes Sr-Nd isotopic compositions of the studied volcanic rocks are presented in Table 1 and Figure 9a. Initial 87 Sr/ 86 Sr ratios and Nd (t) values were calculated according to the formation age determined by zircon U-Pb dating. The volcanic rocks have ( 87 Sr/ 86 Sr) i ratios between 0.708901 and 0.711610 and Nd (t) values between −6.9 and −3.3. The higher ( 87 Sr/ 86 Sr) i values of the volcanic rocks were attributed to post-magmatic alteration, since the Rb and Sr contents of the rocks has been modified by alteration. As a result, the compositions of Sr isotope are excluded in the following discussion. According to these data, the calculated two-stage depleted-mantle Nd model ages (T DM2 ) range from 1.47 to 1.19 Ga (Table 1). In Situ Zircon Hf-O Isotopes One sample (ZK5710-30) was selected for analysis of zircon Hf-O isotopes, due to their high magmatic zircon content. Results are presented in Table 2 and Figure 9b. The Hf (t) value and the two-stage depleted mantle Hf model age were calculated according to the age obtained by zircon U-Pb dating. Twenty-one analyses for sample ZK5710-30 yielded 176 Hf/ 177 Hf ratios between 0.282597 and 0.282701, and the calculated Hf (t) values range from −3.95 to −0.30, corresponding to two-stage Hf model ages of 1.47 to 1.19 Ga. Fifteen O isotope analyses were obtained with 18 O values ranging from 6.07% to 6.79%, which are higher than the mantle value of 5.3% ± 0.3% (Figure 10). Timing of Volcanism and Relationship to Porphyry-Epithermal Mineralization In the Zijinshan porphyry-epithermal metallogenic system, the direct ore-forming magma of the epithermal deposits has not been discovered. 
However, Shimaoshan Group volcanic rocks are distributed extensively in and around the ore district ( Figure 2). Understanding the relationship of the volcanism in the Shanghang Basin to porphyry-epithermal mineralization at Zijinshan can help to understand the nature of the metallogenic magma source area and the controls on metallogenesis. Our zircon U-Pb dating results show that volcanism in the Shanghang Basin occurred at~105 Ma, synchronous with the metallogenic age of the Zijinshan Cu-Au deposit (103 ± 1 Ma, ) and the Luoboling porphyry Cu-Mo deposit (104.9 ± 1.6 Ma, ; 104.6 ± 1.0, ). The Luoboling granodiorite porphyry and the studied volcanic rock samples partially overlap in the K 2 O vs. SiO 2 diagram, mainly in the trachyte-dacite-rhyolite field (Figure 6a). However, the volcanic rocks have a wider range of SiO 2 (55-75 wt %), and the Luoboling porphyry are all acidic (63-73 wt %) (Figure 6a), indicating that the magma that formed the porphyry underwent a higher degree of evolution relative to that of the volcanic rocks. Both the studied volcanic rocks and the Luoboling porphyry belong to the high-K calc-alkaline and shoshonitic series in a Na 2 O + K 2 O vs. SiO 2 diagram (Figure 6b). Moreover, the volcanic rocks have similar REE and trace-element patterns to those of the Luoboling porphyry ( Figure 8). These features, together with the similar ages and the close temporal and spatial relationships between the volcanic rocks and porphyries, indicate that the volcanic rocks and the magma were derived from a common magma chamber beneath the volcanic basin and underwent similar fractionation pathways. Magma Source of the Volcanic Rocks The Shimaoshan Group volcanic rocks in the Shanghang Basin show high MgO contents and high Mg # values relative to those of experimental crustal melt (Figure 7b), suggesting that the volcanic magmas cannot have originated from partial melting of crustal materials. 
The two-stage Hf model ages of 1.41 to 1.18 Ga (Table 2) are substantially younger than those of basement metamorphic rocks of the Cathaysia Block (>1.85 Ga, ) (Figure 9b), indicating that the crust was not the only source of the volcanic rocks. In addition, the Sr-Nd isotope data of the studied samples differ substantially from those of Neoproterozoic and Meso-Paleoproterozoic basement rocks of the Cathaysia Block (Figure 9a), with two-stage Nd model ages of 1.47 to 1.19 Ga ( Table 1) that are much younger than those of basement metamorphic rocks of the Cathaysia Block (>1.74 Ga, ). Experimental studies on melting have shown that continental crust melts are usually sodium rich, but our samples are high-K calc-alkaline and shoshonitic. Therefore, Shimaoshan Group volcanic rocks were not derived from a predominant crustal source. The enriched Sr-Nd-Hf isotopic features of the samples, together with the older Hf and Nd model ages (1.47-1.19 Ga), indicate that the parental melts of the studied volcanic rocks were derived from enriched mantle or a mixture of mantle-derived magma and crust-derived magma. Nb/Ta values can serve as an important indicator of crust-mantle interaction. Continental crust has an average Nb/Ta ratio of 11.43 and primitive mantle of 17.8. The Shanghang Basin volcanic rocks show Nb/Ta ratios that range from 7.03 to 18.18, with a mean value of 10.11, which is near the value of average continental crust. The low Nb/Ta ratios cannot fully interpret as the result of crustal contamination, since an unreasonable proportion of crustal materials is required to cause such a low ratio. All the Cretaceous mafic plutons in the Fujian coastal area of southeastern China have a narrow range of negative Nd (t) values (−1.3 to −3.3; ). This, together with the mantle-like 18 O value of these mafic plutons indicates that these rocks reflect the characteristics of a mantle source and that crustal assimilation was negligible during magma ascent. 
However, the volcanic rocks are more enriched in Sr-Nd compared with the mafic plutons, consistent with minor crustal contamination of the parental magma. Zircon O isotopes can effectively distinguish crustal contamination because mantle-derived magma and upper-crust-derived magma have distinct O isotope characteristics, with 18 O values of 5.3% and 10% -30%, respectively. 18 O values of our samples vary from 6.07% to 6.79% (Table 2) and are much higher than the mantle 18 O range (Figure 10a). This, combined with the negative correlation between zircon 18 O and Hf (t), indicates that the volcanic rocks in the Shanghang Basin were derived from enriched mantle but also affected by some crustal contamination. We performed quantitative modeling on the basis of the zircon 18 O and Hf (t) isotopic compositions by magma mixing of two end members to estimate the degree of crustal contamination. The Cretaceous gabbro near the study area and South China S-type granite were selected as the two end members, representing the mantle and crust components, respectively. The final composition of the mixture depends on the ratio of the two end members, each curve represents a mixed case, and the curvature of the curve is controlled by Hf m /Hf c. The results indicate that the degree of crustal contamination is <30% (Figure 10a). The samples show strong fractionation between LREEs and HREEs, enrichment of LILEs, and depletion in HFSEs, with negative Nb-Ta and Ti anomalies (Figure 8d-f). These characteristics are consistent with those of non-crust-contaminated Cretaceous mafic suites in the Cathaysia Block. Slab-derived fluids or subducted sediments can enrich mantle sources, giving rise to enrichment in LILEs and depletion in Nb, Ta, and Ti, consistent with our results (Figure 10b). 
The studied volcanic samples have very similar Nd isotope characteristics to those of the Cretaceous gabbro in the Fujian coastal area (Figure 9a), further suggesting a similar mantle derivation with the gabbro. Moreover, their high La/Nb and Ba/Nb ratios are consistent with the characteristics of arc volcanic rocks (Figure 10c), which are generally considered to be related to subduction. In addition to the above indicators, the ratios of incompatible trace elements with similar distribution coefficients, such as Nb/U, Nb/Ta, and Ce/Pb, can also be used to distinguish the source region, as they show negligible change during the melting process. The Nb/U values (1.49-7.58) of the studied samples are much lower than those of MORB and OIB (Figure 10d), indicating that these rocks were metasomatized by subduction-related hydrous fluids. Thus, we suggest that these volcanic rocks were derived from mantle material enriched by input of subduction-related materials. The extensive magmatic activity during the Cretaceous in the Cathaysian Block is generally considered to have been the result of westward subduction of the Paleo-Pacific Plate, which could account for the inferred enrichment of mantle material. The Zijinshan ore district volcanic rocks share similar REE and trace-element patterns and Sr-Nd-Hf isotope characteristics with typical arc volcanic rocks, suggesting a subduction affinity. In addition, the volcanic rocks represent high-K calc-alkaline to shoshonitic lavas, which are different from the intermediate, low-K to calc-alkaline rocks in modern arc environments. High-K, calc-alkaline to shoshonitic rocks are also found in lithospheric extensional settings. Accordingly, the characteristics and age of volcanic rocks of the Shimaoshan Group suggest that the coastal region of southeastern China was situated in a lithospheric extensional setting during the Cretaceous. 
It has been proposed that the regional tectono-magmatic evolution in southeastern China during the late Yanshanian was controlled by break-off and rollback of the subducting paleo-Pacific Plate. Thus, we suggest that this geodynamic process caused regional extension and partial melting of the mantle lithosphere in the Zijinshan district and the Shanghang Basin to produce magmas that ultimately formed the volcanic rocks. Petrogenesis The SiO 2 contents of melts directly produced by partial melting of mantle material are less than 57 wt %. However, the SiO 2 contents of the studied Shimaoshan Group volcanic rocks range from 56 to 71 wt %, suggesting that these rocks do not represent the primary magma derived from partial melting of the mantle. Similarly, the Mg # values of primitive arc melts are generally >70, compared with <67 (most <50) of the studied rocks, meaning that these volcanic rocks were formed from mantle-derived magma after a high degree of fractional crystallization in a deep magma chamber. This interpretation is supported by the lower contents of some compatible elements in the volcanic rocks. For example, the Cr (1.2-48.3 ppm) and Ni (1.8-35.6 ppm) contents of the samples are considerably lower than those of primitive arc magma (Cr = 364 ppm, Ni = 168 ppm) (Figure 7). Given that Cr tends to enter Cr-spinel and clinopyroxene, and Ni generally enters olivine, fractional crystallization of olivine, clinopyroxene, and Cr-spinel is inferred to have occurred during the early stage of magma evolution. Moreover, the volcanic rocks are not depleted in Al 2 O 3 content relative to primitive arc magma (Figure 7c), suggesting that plagioclase crystallization was negligible in the early stage of magma differentiation. In addition, there is no negative Eu anomaly apparent in an REE variation diagram (Figure 8a,c,e), also indicating that early fractional crystallization of plagioclase was negligible. 
The SiO 2 content of the studied samples is negatively correlated with various elements, including Fe 2 O 3 T, MgO, Al 2 O 3, and TiO 2 (Figure 7). These negative correlations can be interpreted in terms of fractionation of mafic minerals, such as magnetite, hornblende, and biotite, which tend to remove most of the Fe, Mg, and Ca from melts and cause them to evolve toward more silicic compositions. Owing to different partition coefficients of the various elements, a correlation diagram of the element ratios can qualitatively determine separation of mineral phases. For example, fractionation of biotite can lead to an increase in SiO 2 /Al 2 O 3 and a decrease in Sc/Th (Figure 11a), and fractionation of hornblende can result in a positive correlation between Dy/Yb and Nb/Ta (Figure 11b). These patterns suggest that the magma underwent fractional crystallization of amphibole and biotite when it evolved from an andesitic to a dacitic composition. The observed decreases in V and Ti with SiO 2 may be related to the separation of Fe-Ti oxides, such as magnetite, as supported by microscope observations (Figure 3f). Moreover, fractional crystallization and partial melting trends can be distinguished in diagrams of Ni versus Rb and Ni versus Ba (Figure 11c,d). In summary, the parental magma underwent fractional crystallization of Cr-spinel and clinopyroxene in the deep magma chamber with suppression of plagioclase crystallization. During magma ascent, biotite and hornblende were the main mineral phases. After mineral separation, the magma was emplaced at shallow or surficial crustal levels, forming the Shimaoshan Group volcanic rocks in the Zijinshan district. Implications for Porphyry-Epithermal Mineralization The water content and oxygen fugacity of the magma are the most important controls on the metallogenic potential of porphyry magmas. In a comagmatic system, important physicalchemical information of the parental magmas can be obtained from volcanic rocks. 
In turn, the fO2–H2O conditions in the parental magma of volcanic rocks provide insights into the metallogenic potential of the porphyries in the Zijinshan ore district. It is difficult to evaluate the original magmatic oxidation state in altered plutonic rocks, but normalized zircon Ce and Eu anomalies of volcanic rocks can be used to provide qualitative estimations. The element Eu has two valence states: Eu 3+ and Eu 2+. Eu 3+ is a more compatible ion than Eu 2+ and can therefore more readily replace zirconium ions in the zircon crystal lattice, but it requires more oxidizing conditions. Thus, the ratio Eu N /Eu N * can be used as an indicator of magma oxygen fugacity. To avoid the influence of mineral inclusions (such as apatite and titanite), we used Ca > 200 ppm or La > 0.3 ppm as an indicator of apatite contamination and Ti > 20 ppm to reflect titanite contamination. At the same time, plagioclase crystallization would also influence Eu N /Eu N * values. As a result, samples with similar SiO 2 content (63-66 wt %) were selected for comparison, ensuring that they have similar degrees of differentiation. Zircon Eu N /Eu N * values of the volcanic rocks are similar to those of the Luoboling porphyry deposit and to some other porphyry deposits in the world (Figure 12a), suggesting that the parental magma was of high oxygen fugacity. In a diagram of Fe 2 O 3 T versus MgO (Figure 12b), Fe 2 O 3 T content is positively correlated with MgO content for the studied rocks, with the change in slope marking magnetite saturation, indicating that magnetite had crystallized and separated from the magma. The onset of magnetite saturation from basaltic magma is controlled largely by oxygen fugacity. As shown in Figure 12b, the onset of magnetite saturation in the parental magma of the volcanic rocks was similar to that in other typical arc magmas worldwide. 
This early saturation of magnetite suggests that the oxygen fugacity of magma that formed the studied Shimaoshan Group volcanic rocks was relatively high. Previous studies have shown that water-rich magma is liable to dissolve volatiles during its evolution, which is conducive to enrichment and migration of ore-forming elements. A high-water content of the magma inhibits early crystallization of plagioclase and promotes crystallization of amphibole, which can generate some trace element anomalies. For example, the magma has a higher Sr content when crystallization of plagioclase is inhibited, because there is very little Sr to replace Ca into plagioclase lattice. At the same time, medium REEs and Y can readily enter amphibole in intermediate to acidic magma, and the crystallization of amphibole in water-rich magma reduces its Y content. Therefore, water-rich magma commonly shows a high whole-rock Sr/Y ratio, consistent with our results (Sr/Y = 4.4-54.6, mostly >20), which suggest separation of amphibole. REE patterns also suggest that these elements entered amphibole during the crystallization process (Figure 8a,c,e). In addition, our samples have a whole-rock mean N value of 4.53, much higher than that of Zijinshan barren rocks (3.8, ), and have low mean Dy/Yb values of 0.2 in the zircon and high whole-rock V/Sc ratios (mean = 9.12), indicating a high water content of the magma. Furthermore, Eu 2+ can easily replace Ca 2+ in plagioclase, meaning that the Eu anomaly can be used to indicate the crystallization of plagioclase: a strong negative anomaly indicates early crystallization of plagioclase; and a weak negative anomaly suggests either that the magma had a high water content that inhibited crystallization of plagioclase or that the magma had a high oxygen fugacity with Eu existing mainly as Eu 3+. 
Chondrite-normalized REE patterns (Figure 8) show either a weak negative or negligible Eu anomaly (Eu N /Eu N * = 0.89-1.06), suggesting that fractional crystallization of plagioclase can be ignored. High oxygen fugacity can facilitate porphyry mineralization in multiple stages of magmatic evolution. In addition, high water contents of ore-forming magma are a key control on the formation of porphyry deposits. Copper, gold, and molybdenum are all chalcophile elements with high partition coefficients between sulfides and melts, with the behaviors of Cu and Au being strongly controlled by sulfides. Oxygen fugacity controls sulfur speciation, which in turn is the dominant influence on S solubility of natural silicate melts. The solubility of sulfate is one order of magnitude higher than that of sulfide. Under conditions of high oxygen fugacity, most of the sulfur is removed in the form of sulfate during partial melting of a mantle source, leading to the release of abundant chalcophile-forming metals from the magma. In contrast, sulfide saturation in the magma is suppressed by the high solubility of sulfur, resulting in chalcophile-forming metals (e.g., Cu and Au) being retained in the magma. Moreover, high oxygen fugacity favors the presence of Mo as MoO 4 2−, which promotes enrichment of Mo in residual melt. Water-rich magma is more likely to reach water saturation, which is conducive to the extraction and dissolution of volatiles. These volatiles are readily combined with gold, copper, and other ore-forming elements to form soluble complexes, which facilitates the enrichment and migration of ore-forming elements. Combining all of the above, we conclude that the high water content and high oxygen fugacity of the source magma promoted mineralization of the Zijinshan epithermal-porphyry deposit system. 
Conclusions Zircon U-Pb isotope dating yielded an age of 105 Ma for the Shimaoshan Group volcanic rocks, which is identical to the mineralization age of the nearby porphyry-epithermal ore deposits in the Zijinshan district. This, together with the similar geochemical features, suggests that the volcanic rocks and the mineralization were derived from magmas residing in a common magma chamber beneath the volcanic basin and that these magmas underwent similar fractionation pathways. Major-and trace-element geochemistry reveals that the volcanic rocks from the Shanghang Basin are predominantly high-K and shoshonitic rocks. Sr-Nd-Hf and O isotopes of the rocks indicate that they were derived predominantly from partial melting of the subduction-modified mantle in a back-arc extension setting as a response to rollback of the subducting paleo-Pacific Plate. Extensive fractional crystallization and crustal contamination account for the compositional variation in the volcanic rocks. High Eu N /Eu N * values of the studied volcanic rocks and the inferred early crystallization of magnetite imply a high oxygen fugacity of the parental magma. Inferred hornblende crystallization and inhibition of plagioclase crystallization indicate a high-water content of the magma. High oxygen fugacity and high-water content promoted the dissolution of Cu-and Au-bearing sulfides in the source area and the concentrating of Cu and Au in the magma to form metal-and water-rich magma, followed by extraction of these elements from the magma by volatiles. We conclude that the high oxygen fugacity and high water content were important controlling factors in the formation of the Zijinshan ore deposit. Conflicts of Interest: The authors declare no conflict of interest. |
PM @narendramodi, Arunachal Pradesh CM @PemaKhanduBJP, Union Minister @DrJitendraSingh & officials review the flood… https://t.co/pZqV7IBjV5 — PMO India (@PMOIndia) 1501571062000
GUWAHATI: In an attempt to find a permanent solution to the bi-annual flood problem in Assam , Prime Minister Narendra Modi announced Rs 100 crore to fund a study on the course of the Brahmaputra river and its devastating effects. A high-powered team comprising bureaucrats and technocrats will conduct the study.Modi also announced Rs 2,000 crore as an immediate aid for flood-hit Assam, Arunachal, Nagaland , Manipur and Mizoram.Modi arrived in Guwahati at 10 am for a day-long review of the flood and landslide problems that crippled the five northeastern states of Assam, Arunachal Pradesh , Manipur, Mizoram and Nagaland.State finance minister Himanta Biswa Sarma , who briefed on the PM’s visit, said, “The PM has also ordered release of an additional Rs 250 crore to Assam to meet the immediate need for relief and rehabilitation. Last month, he had ordered release of Rs 300 crore.”“He stressed on finding a permanent solution to flood in Assam and said that a high-powered committee will be formed to study the course of the Brahmaputra river and a corpus fund of Rs 100 cr would be set up for the study,” Sarma said.The Assam floods have so far claimed 83 lives.On Monday, Modi has announced an ex gratia of Rs 2 lakh each for the next of kin of those who have lost their lives in the floods and Rs 50,000 each for those seriously injured.Modi did not make any aerial survey of the flood-affected areas as flood waters have receded in almost all the areas.The ruling BJP in Assam has been criticized for the PM not visiting the state when the flood was at its peak throughout June till mid-week of July. Modi had deputed minister of state for home Kiren Rijiju to visit the flood-affected state last month and assess the situation. A central team has also visited the affected areas to study the extent of damage done by the flood. |
. The early clinical identification of malignant melanoma is important. Pigmented neoplasms are sometimes difficult to diagnose by visual inspection alone. Dermoscopy increases the reliability of the clinical assessment. Dermoscopic criteria of malignancy are well defined on a descriptive ground. They allow an optimal interpretation of the clinical aspect without, however, reaching the sensitivity and specificity of the microscopic examination. |
MONTPELIER, Vt. — At the beginning of the week, the House Judiciary Committee was set to vote on a version of S.241, the bill passed February 25 by the Senate. That bill, as passed by the Senate, would have eliminated any criminal penalties for possessing less than an ounce, and created a regulatory system that in 2017 could license up to 27 cultivators of various sizes, 15 retailers, and 5 labs, all of which could have theoretically started to operate by 2018.
By Friday afternoon, those provisions were only a pipe dream for legalization advocates who saw the House Judiciary Committee complete their full lobotomy of the aforementioned S.241 and approve a bill that barely smells like the cannabis reform passed by the Senate. In passing a heavily-amended version of S.241 the eleven-member Judiciary Committee appropriated “$350,000 for the marijuana prevention, education and counter-marketing programs”, and created a new Substance Abuse Program Manager in the Department of Health.
The bill also revised the definition of driving under the influence, lowering the B.A.C. to .05 if the person “has any detectable amount of delta-9 tetrahydrocannabinol in the person’s blood.” In terms of actually changing marijuana regulations, the provisions passed on Friday would allocate $150,000 to create a Marijuana Advisory Commission that would “report to the Governor and the General Assembly, as needed,” and issue some kind of recommendations by November 2017.
Click the link below to watch the full video of committee discussion and voting:
After more than three weeks of testimony — including a public forum in the House chamber on March 31 — the committee surprised some observers on Thursday by proposing a different ‘strike-all’ version that had eliminated the legal purchase, sales, or cultivation of cannabis, but would have classified home cultivation of two plants the same as possession of an ounce: a civil penalty starting at $200. Highlighting the divisions within the committee — and greater House — those provisions changing legalization to decriminalization failed on a 6-5 vote with Rep. Bill Frank (D-Underhill) casting the deciding vote, joining stated opponents of the measure, Reps. Tom Burditt (D-West Rutland), Marcia Martel (R-Waterford), Betty Nuovo (D-Middlebury), Vicki Strong (R-Irasburg) and Gary Viens (R-Newport).
For Rep. Frank, reached Friday evening by phone, his reason for helping to swing the vote came down to preparation and process:
“Our point was to take one thing at a time — it’s gonna happen sometime, and probably sometime soon, but I want to make sure we have education and prevention in place first, that’s why we’ve been so successful with keeping kids from smoking.”
Frank, a former school board member and former member of the Tobacco Evaluation and Review Board, said he wouldn’t have supported a Libertarian ‘legalization-without-regulation’ type of proposal either. He was not willing to compromise on decriminalization saying he thought it should be part of its own legislation and not part of what he viewed as an education bill.
After the defeat of the first amended version, Rep. Willem Jewett (D-Ripton), was quick to offer up the new aforementioned amendment, which moved all questions of legalization and decriminalization of home cultivation to the future study commission. Those amendments were enough to sway Rep. Frank to change his vote, allowing the bill to pass the committee.
Although visibly disheartened, supporters of S.241 noted that as long as the bill moves forward in some capacity, it could eventually land in a joint ‘conference committee’ where three members of the Senate would join three members of the House to revise the bill before a full floor vote.
While her role as Committee Chair has been more focused on negotiating practical compromises than projecting her personal views, Rep. Grad was asked if she would be willing to support legalization again as a House member of a conference committee, or if she had drawn her own personal line in the sand with decriminalization. Said Grad, “I need to see what happens in the House, we did not pass a legalization bill.”
The final composition of such a committee would be left up to the leadership in the House and Senate, but would likely include the influential Senator Dick Sears (D-Bennington), who was rebuked by some legalization advocates for not going far enough in the original S.241, but who today looks to pot supporters more like Dr. Bronner than Dr. Evil.
The next step for the legislation is the House Ways and Means Committee, which includes seven Democrats, three Republicans, and one Independent. The opinions of the Ways and Means leadership are largely unknown, and party lines have not necessarily been a predictable indicator of the votes. It’s unsure what kind of decriminalization, legalization, or education bill might emerge (or die) in the coming weeks, but advocates will hope that now that it’s cleared the Judiciary, the grass will be greener down the hall in Room 34. |
Recent advances in the development of thioredoxin reductase inhibitors as anticancer agents. Redox homeostasis is crucial for the cellular viability and normal function which balance is maintained by two major cellular antioxidant systems, including glutathione system and thioredoxin system. Thioredoxin system, including thioredoxin (Trx), thioredoxin reductase (TrxR) and NADPH, exhibits a wide range of functions such as regulation of redox state and cell apoptosis. Particularly, Trx functions as a protein disulfide reductase which is essential for the function of Trx system. However, the bioactivity of Trx is closely dependent on its reducing form. According to the information, TrxR is the only cellular enzyme to catalyze the NADPH-dependent reduction of Trx. Besides the reduction of some protein disulfide like Trx, TrxR still has a broad substrate specificity to reduce some small molecules like 5, 5 '-dithiobis-2- nitrobenzoic acid (DTNB). The reduction of Trx or its own direct action towards its various substrates endows TrxR with a wide range of cellular functions. Recent studies have elucidated that TrxR was upregulated in many malignant tumors and inhibition of TrxR could prevent the tumor initiation and progression, suggesting TrxR to be a promising target for cancer therapy and the high nucleophilic and accessible selenocysteine (Sec) active site might be the prime target for drug design. Various kinds of TrxR inhibitors have been developed as anticancer agents for years. In this review, TrxR inhibitors are divided into three classes, including metal-containing inhibitors, naturally occurring products and their derivatives and other newly emerged inhibitors. The last five years reports about TrxR inhibitors of each class will be introduced and their novel inhibiting mechanisms will be discussed. |
Alternative methods in toxicology: pre-validated and validated methods The development of alternative methods to animal experimentation has progressed rapidly over the last 20 years. Today, in vitro and in silico methods have an important role in the hazard identification and assessment of toxicology profile of compounds. Advanced alternative methods and their combinations are also used for safety assessment of final products. Several alternative methods, which were scientifically validated and accepted by competent regulatory bodies, can be used for regulatory toxicology purposes, thus reducing or fully replacing living animals in toxicology experimentation. The acceptance of the alternative methods as valuable tools of modern toxicology has been recognized by regulators, including OECD, FDA and EPA. This paper provides a brief overview of the topic alternative methods in toxicology and focuses on pre-validated and validated alternative methods and their position in the modern toxicology. Introduction The development of alternative methods to animal experimentation has progressed rapidly over the last 20 years. Knowledge of alternative methods and their use in planning and conducting toxicology experiments has become essential for modern toxicologists. Alternative methods (alternative toxicology tests) are methods able to: reduce the number of animals necessary in a test, refine toxicology procedures to make them less painful or stressful to laboratory animals, or, replace animals with non-animal (in vitro, ex-vivo or in silico systems). animals, and particularly the number of toxic substances tested) (Russell & Burch, 1959). The 3Rs provide a strategy for a rational and stepwise approach to minimising animal use and suffering in experiments, without compromising the quality of the scientific work being undertaken. 
A number of useful alternative methods have been developed for evaluation of the potential toxic effects of chemicals and products since publication of the 3Rs principles. However, it still takes many years to implement these principles into the toxicology praxis. Since 1986, the concept of the 3Rs has been supported by laws in the EU that require researchers and investigators to use available alternatives before conducting in vivo experimentation. The 3Rs Declaration of Bologna, which was adopted in 1999 by the Third World Congress on Alternatives and Animal Use in the Life Sciences, strongly endorsed and reaffirmed the principle of the 3Rs. Today, Reduction, Refinement and Replacement are basic tenets of EU research and other policies concerning the use of animals in scientific testing and experimentation. The Council Directive 86/609/EEC on the protection of animals used for experimental and scientific purposes in article 7.2 states: "An experiment shall not be performed if another scientifically satisfactory method of obtaining the result sought, not entailing the use of an animal, is reasonably and practicably available". Article 23 further states: "The Commission and Member States should encourage research into the development and validation of alternative techniques which could provide the same level of information as that obtained in experiments using animals, but which involve fewer animals or which entail less painful procedures, and shall take such other steps as they consider appropriate to encourage research in this field. The Commission and Member States shall monitor trends inexperimental methods". As a response to articles 7 and 23 of the Council Directive 86/609/EEC, the European Centre for the Validation of Alternative Methods (ECVAM) was established in 1991. ECVAM was given the task to scientifically evaluate and validate alternative methods, to serve as an information centre, and to maintain a database on in vitro tests and validated methods. 
Once a method has undergone a formal validation, an independent peer-review process takes place. Subsequently, the ECVAM Scientific Advisory Committee (ESAC) gives advice on the scientific validity of the method. ECVAM also monitors the research projects funded by the European Commission, and maintains links with relevant platforms and associations devoted to reduction, refinement, and replacement (3Rs of animal use for scientific and regulatory purposes. Recently, two additional committees, PARERE (Network of European Regulators) and ESTAF (Institutions with vested interest in development and use of alternative methods) have been established to help ECVAM in identification of the most promising alternative method with regulatory relevance. Why do we need to validate alternative methods? The validation process ensures that alternative methods developed by academic or industrial scientists will be scientifically valid and thus, eventually accepted by regulatory authorities for classification and labeling, product approval or safety testing purposes. Examples of where validated methods are required to generate toxicology data include e.g.: REACH (Registration, Evaluation, Authorization of Chemicals) Cosmetic directive 76/768/EEC (VII Amendment) Classification and Labelling of Chemicals and Transport regulations Test method validation is a process based on scientifically sound principles by which the reliability and relevance of a particular test, approach, method, or process are established for a specific purpose. Reliability is defined as the extent of reproducibility of results from a test within and among laboratories over time, when performed using the same standardised protocol. Relevance of a test method describes the relationship between the test and the effect in the target species and whether the test method is meaningful and useful for a defined purpose, with the limitations identified. 
In brief, it is the extent to which the test method correctly measures or predicts the (biological) effect of interest, as appropriate. Regulatory need, usefulness and limitations of the test method are aspects of its relevance. New and updated test methods (both in vivo and in vitro) need to be both reliable and relevant, i.e., validated (Worth & Balls, 2004;a,b). Validation criteria for new toxicological test methods in use today were developed as collaborative efforts of lead scientists from both the in vivo and in vitro communities, regulators and other experts beginning in the early 1980's. The process was carried out under the auspices of three organizations: the Organisation for Economic Cooperation and Development (OECD), the European Centre for the Validation of Alternative Methods (ECVAM), and the Interagency Coordinating Committee on the Validation of Alternative Methods (ICCVAM). These international organizations have worked together with external experts and national organizations such as FRAME, ZEBET and CAAT on harmonizing the validation criteria so that there are no major differences between them amongst different countries and continents (Worth & Balls, 2004). Pre-validation and validation principles and criteria for how validation studies of new or updated test methods should be performed are described in detail in the OECD Guidance Document 34. Typically, there are two types of validation studies, prospective and retrospective validation. A prospective study involves generation of new data while a retrospective study re-assesses existing data. A typical prospective validation process is composed of 6 stages (see Figure 1). A retrospective study is usually limited to the evaluation of data submitted in a standardized and recommended form requested by particular organization performing the evaluation. 
A test is considered validated when its performance characteristics, advantages, and limitations have been adequately determined for a specific purpose. The measurement of a test's reliability and relevance and required for both types of validation studies. Predictive ability and reliability of a test is judged by: Sensitivity: the percentage of positive chemicals correctly identified. Specificity: the percentage of negative chemicals correctly identified. Predictivity: the percentage of predictions for a particular classification, which were correct. Accuracy: the overall percentage of correct classifications. Other parameters assessed by the biostatistician during validation are: Reproducibility within laboratories -concordance of the classifications between 3 and more independent runs in single laboratory. Reproducibility between laboratories -concordance of the classifications between laboratories. Probability for correct classification. Alternative methods and models used for reduction & replacement The following systems can be used as partial or full replacements of animals in toxicology experiments: i) in vitro methods (primary cultures, finite lifespan cell lines, continuous cell lines, reconstructed 3D tissues), ii) ex vivo methods (isolated animal tissues and organs) and iii) in silico methods: computer simulations and mathematical models, QSAR's etc. Depending on the objective of the study, correctly selected in vitro methods in combination with a deep knowledge of the tested compounds (obtained from databases or computer simulations/QSARs, analytical chemistry, etc.) may be more appropriate for certain areas of interest than their animal counterparts. 
Subsequent Endorsement National/ Regional (for methods not yet accepted internationally) International acceptance Genotoxicity Bacterial reverse mutation (Ames) test In vitro OECD TG 471 In vitro cell gene mutation test In vitro OECD TG 476 In vitro chromosomal aberration test In vitro OECD TG 473 In vitro Limitations of most in vitro tests: interactions between tissues and organs cannot be tested, with most in vitro test systems, in vivo doseresponses cannot be obtained for human risk assessment, systemic effects cannot be evaluated, pharmacokinetics cannot be evaluated, chronic effects cannot yet be tested, technical limitations: solubility, reaction with plastics, lack of in vivo-like barrier properties. Overview of alternative methods validated and endorsed by ECVAM, ICCVAM, OECD or other regulatory organisations Once a method has been scientifically validated, it can enter the process of regulatory acceptance and guideline adoption. Regulatory acceptance procedures vary among countries as well as among regulatory agencies within the same country. Therefore, the Organization for Economic Cooperation and Development (OECD) promotes the harmonization of international regulatory acceptance providing the Guidance Document (GD) on the Validation and International Acceptance of New or Updated Test Methods for Hazard Assessment. Adherence to the principles defined in the OECD GD 34 increases the likelihood of the adoption of the a new or modified method. Table 1 is adopted with minor modification from the AltTox web-site, (www.alttox.org) and provides an overview of the alternative toxicity test methods that are considered valid according to accepted international criteria. The test methods listed in this table have been judged to be scientifically valid by ECVAM, ICCVAM, JaCVAM and the OECD. Although uncommon, a test method may also be accepted for regulatory use without formal validation. 
Conclusion A number of validated and pre-validated methods exist that can be used as partial or full replacements of animal experiments (e.g. genotoxicity, testing for local toxicity effects as skin corrosion, irritation, quality control of biologicals, production of monoclonal antibodies, safety testing of final cosmetic products). As proven by several international validation studies, alternative methods have potential to reduce the number EDQM/European Pharmacopeia 1 All in vitro and ex vivo methods listed; in vivo methods proposed to reduce or refine animal use also listed 2 Replaces animal use for initial dose setting, but in vivo test required to complete assessment 3 TA assay is in process of being formally validated, but included here because of OECD TG 4 Subject to product-specific validation to demonstrate equivalence to the rabbit pyrogen test (RPT) 5 Only positive test results accepted in the 2007 endorsement 6 ICCVAM recommendations being finalized of test animals needed for experiments or even replace the whole animal test. Testing strategies combining in vitro, ex vivo and in silico methods could be successful for areas where a single alternative method may currently be failing. When developing alternative methods for more complex toxicity endpoints, it will be necessary to investigate the toxicology pathways and mechanisms of toxic action. At the same time, we will need to reconsider the predictive ability of the traditional animal tests and their concordance with effects observed in man. These considerations will greatly enhance our ability to produce relevant and reliable alternative methods for prediction of human health effects. |
/** Editing support for the project name column */
private class ProjectNameEditingSupport extends EditingSupport {
private ProjectNameEditingSupport(ColumnViewer viewer) {
super(viewer);
}
@Override
protected void setValue(Object element, Object value) {
ImportedProject project = (ImportedProject) element;
project.setProjectName(value.toString());
mCheckboxTableViewer.update(element, null);
updateValidity();
validatePage();
}
@Override
protected Object getValue(Object element) {
ImportedProject project = (ImportedProject) element;
return project.getProjectName();
}
@Override
protected CellEditor getCellEditor(Object element) {
return new TextCellEditor(mTable);
}
@Override
protected boolean canEdit(Object element) {
return true;
}
} |
Posted by Marc Lee under Alberta, BC, climate change, oil and gas, regulation.
August 8th, 2012
Comments: 1
The federal government released an updated Canada’s Emission Trends 2012 report today. In a remarkable shift in federal rhetoric just this past week, the Harperites now appear to be more sensitive to concerns about the Enbridge pipeline and climate change more generally. But appearances can be deceiving and there is good reason to believe the current charm offensive is just a recasting of business-as-usual in recognition of just how offside the government has been on climate file.
Here is Environment Minister Peter Kent in a speech delivered today:
Canada is now half way to its target of reducing total greenhouse gas emissions by 17 percent from 2005 levels by 2020. This is the result of the Harper Government’s realistic, sector-by-sector approach to greenhouse gas regulations that is reducing emissions, while continuing to create jobs and encouraging economic growth. Last year, we were one-quarter of the way to our goal. And now we’re half way there. This shows the significant progress we are making in meeting our Copenhagen commitment.
First of all, it is worth recalling that the Harper government abandoned the Kyoto Protocol and its target of emissions 6% below 1990 levels by 2012 (which works out to a drop from 589 Mt CO2 equivalent in 1990 to 554 Mt in 2012). In its place they made a commitment in the 2009 Copenhagen Agreement to a lesser target of 17% below 2005 levels by 2020 (a drop from 740 Mt in 2005 to 607 Mt in 2020). Kent claims the government is halfway there, but the modeling in Canada’s Emission Trends projects that in 2020 emissions will be 720 Mt. This is only 2.7% below 2005 levels, nowhere close to a 17% reduction.
So where does halfway come in? The report creates an alternative “do nothing” scenario that leads to surging emissions of 850 Mt in 2020, then subtracts estimates of federal and provincial actions taken to date, which gets us to 720 Mt. That is a lot of spin on the numbers, perhaps hoping few people will bother to read the actual report and just report the government’s line.
But even the 720 Mt number is suspect for a variety of reasons. First, the economic downturn lowers the baseline, with 2010 emissions coming in lower than previously expected. Next, the feds have included land use changes in the calculations for the first time. The decline in forestry activity means more carbon will remain sequestered in Canada’s forests, and this will lower emissions by 26 Mt. Take these out and the 2020 estimate jumps back to 745 Mt, slightly more than 2005 levels. Then report also arbitrarily lowers emissions from BC’s natural gas sector, relative to National Energy Board projections. And it assumes that proposed regulations yet to be ratified contribute to decreased transportation emissions. And that Saskatchewan implements carbon capture and storage for four new coal-fired power plants.
The other matter is to what extent the feds can take credit for any reductions in various sectors of the Canadian economy. Much of the heavy lifting appears to have been done at the provincial level, including BC’s carbon tax, Quebec’s cap-and-trade regime and Ontario’s phase-out of coal-fired electricity. In transportation, new vehicle regulations are touted but these just copy the new regulations established by the United States. The report makes repeated reference to new regulations being developed for the oil and gas sector, too, but these have been promised for a long time and no draft regulations have yet seen the light of day.
The report is not a complete write-off. It highlights clearly the emissions growth coming from Alberta, whose emissions will increase 48 Mt between 2010 and 2020, a rise of 20%. BC is the other culprit, with an increase of 16 Mt between 2010 and 2020, a rise of 29%. For BC, the feds are effectively outing the provincial government for failing to comply with its legislated GHG reduction target of a one-third decrease in 2020 relative to 2007 levels. The BC government, by pursuing a massive expansion of natural gas for export as LNG (liquified natural gas) to Asia, is breaking its own law.
All other provinces and territories either stay level or show projected decreases in emissions. Ontario, in particular, is expected to see emission reductions, primarily due to the phase out of coal.
By industry sector, it is no surprise that oil and gas is driving emissions growth in Canada. Emissions are expected to surge by 50 Mt per year between 2010 and 2020, an increase of almost one-third. Within that sector, oil sands emissions more than double (and relative to 2005, more than triple). Put another way, Canada could easily meet its 2020 target and then some if it stopped being a peddler of fossil fuels to export markets. In fact, the GHG emissions associated with exports make the underlying situation much worse because the carbon content of the fuels combusted in other nations does not count in Canada’s inventory. Amanda Card and I estimated Canada’s exported GHG emissions to be 115% of our own emissions from burning fossil fuels.
Smoke, meet mirror. The change in tone from Minister Kent is welcome, but when you look closely there is a credibility gap on the climate file at least as wide as Canada’s emissions gap. |
A few weeks ago, we reported a new Tesla Model S hearse concept commissioned by a funeral transport company based in the Netherlands and built by a stretched limousine maker. At the time, the companies only released highly photoshopped images and insisted that the vehicle was the real deal.
Now that they officially unveiled it, we can confirm that it is real and actually quite impressive…
Our first post about the concept was quite popular and you guys had some fun with your sick sense of humor.
Leon wrote: “Now the only emissions will come from the body…”, and egoman added: “I bet it will be popular. People will be dying to ride in one.”
All jokes aside, RemetzCar, the company behind the concept, did an impressive job retrofitting the sedan to fit a full-size coffin. They cut the vehicle in half after having removed the battery pack. Then, they extended the base by 80 cm (31.5 inch) before fitting the battery pack back into the platform.
They released new pictures and it looks very good:
At first glance, it looks like quality work and they managed to stay relatively true to Tesla’s lines despite the significantly new shape. It would be interesting to know how the modifications affect the range and performance of the vehicle. We reached out to the company for more details, but we didn’t get a response. We will update if that changes.
Of course, it is clearly a hearse and for funeral transport only, but it also actually gives us a good idea of what the Model S could look like as a wagon – almost.
Additionally and as a reader pointed in the last article, if you remove the glass top, you end up with an El Camino-like Tesla Model S. |
// createBustMiddleware returns a middleware that will bust the cache
// for entries that match the patterns when the routes that the middleware is applied to are matched.
func createBustMiddleware(patterns []string) func(*fiber.Ctx) error {
return func(ctx *fiber.Ctx) error {
dataCache := ctx.Locals("cache").(*cache.LRUCache)
matchedEntries := dataCache.Match(patterns, ctx.AllParams())
dataCache.Bust(matchedEntries...)
ctx.Next()
return nil
}
} |
Torontonians want to know what Justin Trudeau will do for them. Yet even before the electoral dust has settled, his government’s promise to kill the expansion of the Billy Bishop Toronto Island Airport is a gift that will keep on giving. Though the battle rages still, it’s clear the political headwinds have shifted. Adding 200 metres to each end of the runway to accommodate passenger jets in an operation that has no place being there in the first place has less support than ever.
An expansion of Billy Bishop airport appears less likely under the new Liberal government. ( DAVID COOPER / TORONTO STAR )
That, of course, means little in a city where even a proposal as inappropriate as this is taken seriously — and at the highest levels. The campaign led by Porter Airlines’ Robert Deluce has been extremely effective. His fans are numerous, especially among those who don’t live downtown. But all the public relations in the world can’t make a silk purse of a sow’s ear, or turn a potential city-wrecker into a desirable urban amenity. Even as it is, the kids who attend nearby Harbourfront Public School may breathe some of the most polluted air in Toronto. It doesn’t come from Porter’s turboprops, but from the cabs that serve them. At one point, a Porter ally suggested the school, which also houses a community centre, should be torn down for a parking lot. That anyone could float such a desperate proposal shows how high the stakes are. It also reveals the sense of unreality that has surrounded the file from the start.
Article Continued Below
Flying jets in and out of Billy Bishop would make a bad situation worse. And given the billions of dollars, both public and private, that have been poured into revitalizing the waterfront, sacrificing valuable land on the altar of corporate hubris would be self-defeating. “The commitment we made during the election holds,” insists Toronto MP Adam Vaughan. “There are a lot of competing interests on the waterfront. The tripartite agreement sets the right balance and has given us the waterfront we have today.” Transportation Minister Marc Garneau, has insisted no decision would be made until after the issue has (yet again) been thoroughly studied, but late Thursday evening tweeted confirmation that there’s no intention to reopen the tripartite agreement.
If Vaughan is right that the Liberals won’t allow jets into the island airport, his party has done Toronto a huge favour. This is the sort of decision that eludes the city. Even when Deluce wrote to Toronto’s ex-mayor and big supporter, Rob Ford, in 2013 asking the city to approve expansion, he got nowhere. The city isn’t mature enough to do the right thing. Bright shiny objects in the sky are enough to dazzle council long enough to sway opinions. The former federal government’s record on the issue wasn’t wildly reassuring, either.
Deluce wants the environmental assessment to continue regardless. Seemingly he hopes it will find the island perfect for jets. That report is expected early next year.
Article Continued Below
But the tripartite agreement that governs the airport can only be redrawn if all three parties — federal government, city and PortsToronto — agree. As of two weeks ago, it seems at least one doesn’t. Deluce’s next argument will be that the CSeries jets he wants are important to the economic health of its manufacturer, Bombardier. In Toronto, however, where the company has failed to deliver the streetcars it’s building for the TTC in timely fashion, its name is synonymous with corporate ineptitude. A yes from Garneau would mean flying in the face of his colleagues from Canada’s most powerful city. As a former astronaut, Garneau surely knows the importance of a soft landing. Christopher Hume can be reached at chume@thestar.ca |
def mergeRows(self, df1, df2):
    """Stack ``df2`` below ``df1`` and return the combined DataFrame.

    Both inputs are left untouched; the original index labels of each
    frame are preserved, so the result may contain duplicate indices.
    """
    return pd.concat([df1, df2])
/*
* Copyright © 2015 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
/**
* This package contains the WikipediaPipeline Application that demonstrates a CDAP Workflow for processing and
* analyzing Wikipedia data.
* <p>
* The app contains a CDAP Workflow that runs in either online or offline mode.
* In the offline mode, it expects Wikipedia data to be available in a Stream.
* In the online mode, it attempts to download wikipedia data for a provided set of page titles
* (formatted as the output of the Facebook Likes API). Once wikipedia data is available it runs a map-only job to
* filter bad records and normalize data formatted as text/wiki-text into text/plain.
*
* It then runs two analyses on the plain text data in a fork:
* </p>
*
* <ol>
* <li>
* {@link co.cask.cdap.examples.wikipedia.ScalaSparkLDA} runs topic modeling on Wikipedia data using Latent
* Dirichlet Allocation (LDA).
* </li>
* <li>
* {@link co.cask.cdap.examples.wikipedia.TopNMapReduce} that produces the Top N terms in the supplied Wikipedia
* data.
* </li>
* <li>
* The output of the above analyses is stored in the following datasets:
* <ul>
* <li>A Table named lda which contains the output of the Spark LDA program.</li>
* <li>A KeyValueTable named topn which contains the output of the TopNMapReduce program.</li>
* </ul>
* </li>
* </ol>
*
* <p>
* One of the main purposes of this application is to demonstrate how the flow of a typical data pipeline can be
* controlled using Workflow Tokens.
* </p>
*/
package co.cask.cdap.examples.wikipedia;
|
def vcr_config():
    """Build the VCR configuration used when recording HTTP cassettes.

    Noisy request headers are filtered from recordings, and every recorded
    response is stripped of volatile headers (``X-*``/``CF-*`` plus a fixed
    deny-list) so cassettes stay small and deterministic across re-records.
    """

    def _scrub_response(response):
        # Headers that bloat cassettes without affecting replay.
        deny_list = [
            "Content-Security-Policy",
            "Expect-CT",
            "ETag",
            "Referrer-Policy",
            "Strict-Transport-Security",
            "Vary",
            "Date",
            "Server",
            "Connection",
            "Set-Cookie",
        ]
        headers = response["headers"]
        # Snapshot the keys so the mapping can be mutated while iterating.
        for name in list(headers):
            if name.startswith(("X-", "CF-")) or name in deny_list:
                headers.pop(name)
        return response

    return {
        "filter_headers": [
            ("cookie", "PRIVATE"),
            "Accept",
            "Accept-Encoding",
            "Connection",
            "User-Agent",
        ],
        "before_record_response": _scrub_response,
        "decode_compressed_response": True,
    }
<filename>src/main/java/com/sg/tuts/web/controllers/ContactController.java
package com.sg.tuts.web.controllers;
import java.util.ArrayList;
import java.util.List;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import com.sg.tuts.core.models.Contact;
import com.sg.tuts.web.helpers.ContactHelper;
/**
 * REST-style controller exposing a simple in-memory contact list.
 *
 * <p>Spring controllers are singletons, so the {@code contacts} list is shared by
 * every request thread. Both handlers are {@code synchronized} so the
 * check-then-reload in {@link #getIndex()} and the add in
 * {@link #saveContact(ModelMap)} are atomic and visible across threads; the
 * original code mutated and reassigned the list with no synchronization.
 */
@Controller
@RequestMapping("/contacts")
public class ContactController {

    /** Shared in-memory store; guarded by {@code this}. */
    private List<Contact> contacts = new ArrayList<Contact>();

    /**
     * Returns the contact list, (re)loading the seed data whenever the list has
     * shrunk to two entries or fewer.
     *
     * <p>NOTE(review): the {@code size() <= 2} refresh heuristic is preserved
     * from the original code — confirm it is intentional.
     *
     * @return the current contact list
     */
    @RequestMapping(method = {RequestMethod.GET})
    public synchronized @ResponseBody List<Contact> getIndex() {
        if (contacts.size() <= 2) {
            contacts = ContactHelper.getContacts();
        }
        return contacts;
    }

    /**
     * Creates a contact from the posted fields and returns the updated list.
     *
     * @param map request body bound as a model map of contact fields
     * @return the full contact list including the newly added entry
     */
    @RequestMapping(method = {RequestMethod.POST})
    public synchronized @ResponseBody List<Contact> saveContact(@RequestBody ModelMap map) {
        Contact contact = ContactHelper.getContact(map);
        contacts.add(contact);
        return contacts;
    }
}
|
<reponame>suluner/tencentcloud-sdk-cpp
/*
* Copyright (c) 2017-2019 THL A29 Limited, a Tencent company. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef TENCENTCLOUD_IECP_V20210914_MODEL_EDGENODERESOURCEINFO_H_
#define TENCENTCLOUD_IECP_V20210914_MODEL_EDGENODERESOURCEINFO_H_
#include <string>
#include <vector>
#include <map>
#include <tencentcloud/core/utils/rapidjson/document.h>
#include <tencentcloud/core/utils/rapidjson/writer.h>
#include <tencentcloud/core/utils/rapidjson/stringbuffer.h>
#include <tencentcloud/core/AbstractModel.h>
namespace TencentCloud
{
namespace Iecp
{
namespace V20210914
{
namespace Model
{
/**
 * Edge node resource information.
 */
class EdgeNodeResourceInfo : public AbstractModel
{
public:
    EdgeNodeResourceInfo();
    ~EdgeNodeResourceInfo() = default;
    // Serialize this model into the given rapidjson value.
    void ToJsonObject(rapidjson::Value &value, rapidjson::Document::AllocatorType& allocator) const;
    // Populate this model from a rapidjson value; parse errors are reported via the outcome.
    CoreInternalOutcome Deserialize(const rapidjson::Value &value);

    /**
     * Get CPU available for use. Unit: m-cores.
     * Note: this field may return null, indicating that no valid value could be obtained.
     * @return AllocatedCPU CPU available for use. Unit: m-cores.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    std::string GetAllocatedCPU() const;

    /**
     * Set CPU available for use. Unit: m-cores.
     * Note: this field may return null, indicating that no valid value could be obtained.
     * @param AllocatedCPU CPU available for use. Unit: m-cores.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    void SetAllocatedCPU(const std::string& _allocatedCPU);

    /**
     * Check whether the AllocatedCPU parameter has been set.
     * @return whether AllocatedCPU has been set
     */
    bool AllocatedCPUHasBeenSet() const;

    /**
     * Get total CPU. Unit: m-cores.
     * Note: this field may return null, indicating that no valid value could be obtained.
     * @return TotalCPU Total CPU. Unit: m-cores.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    std::string GetTotalCPU() const;

    /**
     * Set total CPU. Unit: m-cores.
     * Note: this field may return null, indicating that no valid value could be obtained.
     * @param TotalCPU Total CPU. Unit: m-cores.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    void SetTotalCPU(const std::string& _totalCPU);

    /**
     * Check whether the TotalCPU parameter has been set.
     * @return whether TotalCPU has been set
     */
    bool TotalCPUHasBeenSet() const;

    /**
     * Get allocated memory. Unit: G.
     * Note: this field may return null, indicating that no valid value could be obtained.
     * @return AllocatedMemory Allocated memory. Unit: G.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    std::string GetAllocatedMemory() const;

    /**
     * Set allocated memory. Unit: G.
     * Note: this field may return null, indicating that no valid value could be obtained.
     * @param AllocatedMemory Allocated memory. Unit: G.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    void SetAllocatedMemory(const std::string& _allocatedMemory);

    /**
     * Check whether the AllocatedMemory parameter has been set.
     * @return whether AllocatedMemory has been set
     */
    bool AllocatedMemoryHasBeenSet() const;

    /**
     * Get total memory. Unit: G.
     * Note: this field may return null, indicating that no valid value could be obtained.
     * @return TotalMemory Total memory. Unit: G.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    std::string GetTotalMemory() const;

    /**
     * Set total memory. Unit: G.
     * Note: this field may return null, indicating that no valid value could be obtained.
     * @param TotalMemory Total memory. Unit: G.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    void SetTotalMemory(const std::string& _totalMemory);

    /**
     * Check whether the TotalMemory parameter has been set.
     * @return whether TotalMemory has been set
     */
    bool TotalMemoryHasBeenSet() const;

    /**
     * Get allocated GPU resources.
     * Note: this field may return null, indicating that no valid value could be obtained.
     * @return AllocatedGPU Allocated GPU resources.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    std::string GetAllocatedGPU() const;

    /**
     * Set allocated GPU resources.
     * Note: this field may return null, indicating that no valid value could be obtained.
     * @param AllocatedGPU Allocated GPU resources.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    void SetAllocatedGPU(const std::string& _allocatedGPU);

    /**
     * Check whether the AllocatedGPU parameter has been set.
     * @return whether AllocatedGPU has been set
     */
    bool AllocatedGPUHasBeenSet() const;

    /**
     * Get total GPU.
     * Note: this field may return null, indicating that no valid value could be obtained.
     * @return TotalGPU Total GPU.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    std::string GetTotalGPU() const;

    /**
     * Set total GPU.
     * Note: this field may return null, indicating that no valid value could be obtained.
     * @param TotalGPU Total GPU.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    void SetTotalGPU(const std::string& _totalGPU);

    /**
     * Check whether the TotalGPU parameter has been set.
     * @return whether TotalGPU has been set
     */
    bool TotalGPUHasBeenSet() const;

    /**
     * Get available CPU. Unit: m-cores.
     * Note: this field may return null, indicating that no valid value could be obtained.
     * @return AvailableCPU Available CPU. Unit: m-cores.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    std::string GetAvailableCPU() const;

    /**
     * Set available CPU. Unit: m-cores.
     * Note: this field may return null, indicating that no valid value could be obtained.
     * @param AvailableCPU Available CPU. Unit: m-cores.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    void SetAvailableCPU(const std::string& _availableCPU);

    /**
     * Check whether the AvailableCPU parameter has been set.
     * @return whether AvailableCPU has been set
     */
    bool AvailableCPUHasBeenSet() const;

    /**
     * Get available memory. Unit: G.
     * Note: this field may return null, indicating that no valid value could be obtained.
     * @return AvailableMemory Available memory. Unit: G.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    std::string GetAvailableMemory() const;

    /**
     * Set available memory. Unit: G.
     * Note: this field may return null, indicating that no valid value could be obtained.
     * @param AvailableMemory Available memory. Unit: G.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    void SetAvailableMemory(const std::string& _availableMemory);

    /**
     * Check whether the AvailableMemory parameter has been set.
     * @return whether AvailableMemory has been set
     */
    bool AvailableMemoryHasBeenSet() const;

    /**
     * Get available GPU resources.
     * Note: this field may return null, indicating that no valid value could be obtained.
     * @return AvailableGPU Available GPU resources.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    std::string GetAvailableGPU() const;

    /**
     * Set available GPU resources.
     * Note: this field may return null, indicating that no valid value could be obtained.
     * @param AvailableGPU Available GPU resources.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    void SetAvailableGPU(const std::string& _availableGPU);

    /**
     * Check whether the AvailableGPU parameter has been set.
     * @return whether AvailableGPU has been set
     */
    bool AvailableGPUHasBeenSet() const;

private:

    /**
     * CPU available for use. Unit: m-cores.
     * Note: this field may return null, indicating that no valid value could be obtained.
     * NOTE(review): the generated comment reads "available" although the field is
     * named AllocatedCPU — confirm the intended semantics against the API spec.
     */
    std::string m_allocatedCPU;
    bool m_allocatedCPUHasBeenSet;

    /**
     * Total CPU. Unit: m-cores.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    std::string m_totalCPU;
    bool m_totalCPUHasBeenSet;

    /**
     * Allocated memory. Unit: G.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    std::string m_allocatedMemory;
    bool m_allocatedMemoryHasBeenSet;

    /**
     * Total memory. Unit: G.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    std::string m_totalMemory;
    bool m_totalMemoryHasBeenSet;

    /**
     * Allocated GPU resources.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    std::string m_allocatedGPU;
    bool m_allocatedGPUHasBeenSet;

    /**
     * Total GPU.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    std::string m_totalGPU;
    bool m_totalGPUHasBeenSet;

    /**
     * Available CPU. Unit: m-cores.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    std::string m_availableCPU;
    bool m_availableCPUHasBeenSet;

    /**
     * Available memory. Unit: G.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    std::string m_availableMemory;
    bool m_availableMemoryHasBeenSet;

    /**
     * Available GPU resources.
     * Note: this field may return null, indicating that no valid value could be obtained.
     */
    std::string m_availableGPU;
    bool m_availableGPUHasBeenSet;

};
}
}
}
}
#endif // !TENCENTCLOUD_IECP_V20210914_MODEL_EDGENODERESOURCEINFO_H_
|
/*
* Copyright (c) 2016-2018 <NAME>
* LinkedIn: https://www.linkedin.com/in/juminrubin/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Copyright (c) 2016-2018 <NAME>
* LinkedIn: https://www.linkedin.com/in/juminrubin/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jrtech.engines.fsm.model;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
/**
 * An abstract definition of an action in a design time. This class also represents a specification
 * of the runtime <code>org.jrtech.engines.fsm.Action</code>.
 *
 * @author Jumin
 */
public abstract class AbstractActionDefinition extends AbstractParameterizableDefinition {

    private static final long serialVersionUID = -4516828963659036280L;

    // Event emitted when the action completes normally; part of the cached label.
    private String outputEvent;

    // Additional events the action may emit besides the output event.
    private Set<String> alternateEvents;

    public AbstractActionDefinition() {
        super();
        this.alternateEvents = new HashSet<>();
    }

    public String getOutputEvent() {
        return outputEvent;
    }

    /**
     * Sets the output event and invalidates the cached label, which is rebuilt
     * lazily by {@link #toString()}.
     *
     * @param outputEvent the event emitted when the action completes
     */
    public void setOutputEvent(String outputEvent) {
        this.outputEvent = outputEvent;
        internalLabel = null;
    }

    public Set<String> getAlternateEvents() {
        return alternateEvents;
    }

    public void setAlternateEvents(Set<String> alternateEvents) {
        this.alternateEvents = alternateEvents;
    }

    @Override
    public void setName(String name) {
        super.setName(name);
        // The name is part of the cached label; force a rebuild on next toString().
        internalLabel = null;
    }

    /**
     * Checks that the definition id, name and output event are all non-null and
     * non-blank.
     *
     * @return {@code true} when this definition is complete enough to be used
     */
    public boolean isValid() {
        return !isBlank(getDefinitionId()) && !isBlank(getName()) && !isBlank(outputEvent);
    }

    /** Returns {@code true} when the value is {@code null} or only whitespace. */
    private static boolean isBlank(String value) {
        return value == null || value.trim().isEmpty();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = super.hashCode();
        result = prime * result + Objects.hashCode(alternateEvents);
        result = prime * result + Objects.hashCode(outputEvent);
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!super.equals(obj))
            return false;
        if (getClass() != obj.getClass())
            return false;
        AbstractActionDefinition other = (AbstractActionDefinition) obj;
        return Objects.equals(alternateEvents, other.alternateEvents)
                && Objects.equals(outputEvent, other.outputEvent);
    }

    @Override
    public String toString() {
        // Lazily built and cached; invalidated by setName(...) and setOutputEvent(...).
        if (internalLabel == null) {
            internalLabel = getName() + " - " + "(" + getScope() + "): " + outputEvent;
        }
        return internalLabel;
    }
}
|
A Comparative Study of the Effect of Ultrasound and Electrostimulation on Wound Healing in Rats A comparative study has been carried out to investigate the effects of electrical stimulation and ultrasound on wound healing. Eighty-four female rats were divided into four groups depending on the treatment received. The first group was given electrical stimulation of 300 µA direct current, 30 minutes daily, starting with negative polarity and then changed after 3 days of treatment. Group 2 received sham electrostimulation treatment. The third group received 0.1 W/cm² pulsed ultrasound using the moving applicator technique for 5 minutes a day. Group 4 received sham ultrasound treatment. A total of 7 days of treatment was given to all groups. Histopathologic and biochemical analyses on the fourth and seventh days and wound breaking strength on the twenty-fifth day were performed for all groups. By accelerating the inflammatory phase, electrical stimulation had progressed the proliferative phase of wound healing earlier than ultrasound had done. Both electrical stimulation and ultrasound have positive effects on proliferative phases, but electrical stimulation was superior to ultrasound at the maturation phase. There was no difference between the two experimental groups on the mast cell reduction effect. Although ultrasound treatment may seem to be efficient in terms of time, when the effects of electrical stimulation and ultrasound on wound healing with the methods employed in our study are considered, it is concluded that electrical stimulation is a means of treatment superior to ultrasound in wound healing. (Plast. Reconstr. Surg. 100: 966, 1997.)
Time-Varying Asymmetric Barrier Lyapunov Function-Based Impact Angle Control Guidance Law With Field-of-View Constraint A prerequisite for implementing precision strike is to keep the target locked within the seekers field-of-view (FOV). To address this constraint, a novel impact angle control guidance law based on the dynamic surface control and time-varying asymmetric Barrier Lyapunov Function is proposed. The FOV constraint, in this article, is equally transformed to a time-varying asymmetric limitation on the missile-target relative velocity perpendicular to the line-of-sight (LOS). Under the proposed guidance law, the velocity component is prevented from overstepping its limitation and eventually approaches zero, thus satisfying the homing requirement and the FOV constraint in an integrated manner. Compared to previous studies, the proposed guidance law does not require the target information and is applicable against moving targets. Also, the proposed guidance law does not use any switching logic, and consequently the synthesized guidance command is free from abrupt-jumping phenomenon. Numerical simulations and performance comparison fully demonstrates the effectiveness and superiority of the proposed guidance law. |
Lorentz Invariance Violation Limits from the Crab Pulsar using VERITAS Quantum gravity (QG) theories over the past fifty years have sought to understand the relationship between the four fundamental interactions. A major insight gained in this area is that all interactions could possibly unify at Planck-scale energies ~10$^{19}$ GeV. A potential consequence of the unification of gravity with the other three interactions would be a breaking of Lorentz symmetry at Planck-scale energies. The interpretation of Time-of-flight (TOF) measurements from gamma-ray telescopes have been able to put constraints on the energy scales of the Lorentz-invariance violations (LIV). The Crab pulsar, the only pulsar detected at very high energies (VHE, E>100 GeV) presents a unique opportunity to put new constraints on LIV. Presented here are the results of observations of the Crab pulsar with VERITAS and statistical methods to determine limits of LIV effects from energy-dependent timing differences. Introduction The Crab Nebula is believed to be the remnant of a supernova observed in 1054 A.D.; nine hundred and fourteen years later, a high- pulsar with a period of ∼33 ms was discovered in the system. All 117 known -ray pulsars, including the Crab, show a spectral cutoff above a few GeV. Traditional pulsar models attribute this cutoff to curvature radiation originating within the magnetosphere. Measuring the spectral break energy and cutoff shape helps to constrain these models. MAGIC's observations of the Crab pulsar revealed significant pulsations at 25 GeV with hints of signal at energies higher than 60 GeV. For the first time, the possibility existed of a non-exponential cutoff in the spectrum of a pulsar. Pulsed emission was later detected by VERITAS above 120 GeV, rejecting the exponential cutoff model at the 5.6 level (see Figure 1). 
In several quantum gravity models and Standard-Model Extension scenarios, deviations from Lorentz symmetry could emerge from an underlying unified theory. Due to a possible foamy nature of space-time, the speed of light in a vacuum could vary depending on the energy of a particle (for a recent review, see the literature). The energy scale for these violations, $E_{\mathrm{QG}n}$, could therefore be constrained by time-of-arrival differences between photons of different energy originating from the same source. Any sort of time-of-flight (TOF) testing for these violations would require $\gamma$-ray sources with fast variability that are seen at astronomical distances, such as AGN, GRBs and the Crab pulsar above 120 GeV. In many cases the speed of light for a photon with energy $E$ can be expanded as $c(E) \approx c_0 \left(1 - s_\pm \sum_n (E/E_{\mathrm{QG}n})^n\right)$, where $E_{\mathrm{QG}n}$ is the energy scale where LIV effects are relevant and $c_0$ is the speed of light at low energies, $3\times10^{8}$ m/s. As an example, for an object a distance $D$ away, if two photons were emitted simultaneously with energies $E_h$ and $E_l$ with $E_h > E_l$, the time difference measured by the detector is $\Delta t \approx s_\pm \,(E_h - E_l)\, D /(E_{\mathrm{QG}1}\, c_0)$ if the linear term is dominant. $s_\pm$ is equal to +1 in the sub-luminal case, and $-1$ in the super-luminal case. If the quadratic term is dominant, then the timing difference is $\Delta t \approx \tfrac{3}{2}\, s_\pm \,(E_h^2 - E_l^2)\, D /(E_{\mathrm{QG}2}^2\, c_0)$. Typically either $E_{\mathrm{QG}1}$ or $E_{\mathrm{QG}2}$ dominates, and TOF measurements have been able to constrain these quantities. The Crab Pulsar currently (Spring 2013) presents a unique opportunity for LIV TOF measurements, and there are several reasons why it makes a tempting target for these types of studies. VERITAS collected 107 hours of low zenith angle observations on the Crab from the start of four-telescope operations in 2007 through 2011. Data quality selection requires clear atmospheric conditions, based on infrared sky temperature measurements and nominal hardware operation.
Event selection that was applied to the data was optimized a priori by assuming a powerlaw spectrum with an index of -4.0 and a normalization of a few percent of the Crab at 100 GeV. Data reduction followed the standard methods, yielding consistent results with two analysis packages. The Jordell Bank timing ephemeris was used to obtain the timing parameters for the pulsar analysis. Barycentering was done with two custom codes and with tempo2. Applying the H-test to this data set yields a H value of 50, corresponding to a 6.0 significance. Defining the significance for pre-chosen ON and OFF regions of the pulse profile according to Li & Ma gives a 8.8 significance. An unbinned maximum-likelihood fit determined the positions of P1 and P2 to be -0.0023±0.0020 and 0.0398±0.002, respectively. The ratio of the number of pulsed events in P2 over the number of pulsed events in P1 is 2.4±0.6. The pulse profiles measured by VERITAS and the Fermi-LAT is shown in Figure 2. is included, the exact data set used for. The Fermi-LAT pulse profile is also shown below the VERITAS pulse profile). Peak Timing Comparison The pulse profile of the VERITAS data above 120 GeV is compared to the pulse profile of the Fermi-LAT data above 100 MeV. If the same timing solutions are used for both data sets, then the peak positions agree within statistical uncertainty. This indicates no measurable violations of Lorentz invariance, so a lower limit on E QGn is therefore calculated, using Equations 2 and 3. The 95% confidence upper limit on the timing of the peaks is calculated to be less than 100 s. The limits of the linear LIV term is therefore: Dispersion Cancellation The method described in the previous section relies on binning the data in both energy and in pulsar phase. Techniques involving binning always involve a loss of information. Additionally, binning in energy is not ideal because of the variations of the pulse period within the energy bins due to pulsar spin-down. 
The ideal methodology for LIV, if possible, should be unbinned in both energy and time (or phase in this case). The large -ray background due to the Crab Nebula provides additional problems. This section discusses a variation of the Dispersion Cancellation (DisCan) method, that is well-suited to use for pulsars. The method here utilizes the Z 2 m test as a test statistic. The Z 2 m test is derived from a Fourier-series estimator which tests for variations from a uniform (unpulsed) light curve for a chosen Fourier harmonic m. Z 2 m is proportional to the Fourier power of the pulsar. LIV effects would introduce a dispersion of the pulsar signal. The maximal value of Z 2 m, therefore, corresponds to the Fourier power of the undispersed signal. Z 2 m takes the form: where N is the total number of events and i is the phase of the i th event (mod 1). The procedure used is as follows: 1. Adopt a model for a correction to the arrival time of each event, as a function of the event energy. For example, if the LIV effect has the form of E n, then the correction for an event of arrival time of t i and energy E i is: 2. Refold the pulsar phases according to the formula above for a choice of. where t 0,i is the pulsar epoch, and, is the pulsar frequency and 1st derivative of the pulsar frequency, respectively. could hypothetically be any real number, positive or negative, but with some common sense it can be narrowed down. LIV effects are small at GeV/TeV scales, not significant enough to drastically change the intrinsic shape of the pulse profile. The pulsed spectrum of the Crab pulsar extends to ∼400 GeV. It is therefore unlikely that a photon at 400 GeV will move in phase more than 5% of the pulse period of Crab in either direction due to LIV effects. This limits the range in to | 400 GeV/(0.05* 33 ms) | = | | < 4.1s/GeV. 3. Calculate Z 2 m as above in equation 5. 
Repeat steps 2 through 4 on several bootstrapped or Monte Carlo (MC) data sets to determine a probability distribution function (PDF) that will determine test significance and limits. In addition to being unbinned in both phase and energy, this approach uses all photons (P1, P2 and background events), removing all potential trial factors except from for choice of m, which is determined to be 20 from a MC optimization. Results of MC tests of this approach are shown in Figure 3. It should be noted that 20 was the optimal value only investigating the pulse profile taken with VERITAS from. Other pulse profiles from other pulsars or even an updated pulse profile for the Crab with data taken after 2011 could change the optimal value. The Z 2 20 DisCan test, when applied to the data set from the 2007 to 2011 seasons, has a maximal value at = -0.49 s/GeV. The of plot Z 2 20 against trial values of is shown in Figure 4. To determine the statistical significance and limits of this test, a PDF is produced by one thousand MC realizations of the energy distribution. The Z 2 20 DisCan test is applied to each one. The distribution of the max values is the PDF. Figure 5 shows the PDF produced. The maximum found in the data, -0.49s/GeV, has a significance of 1.4 away from the null result of =0. The LIV energy scale is related to by: To place lower and upper bounds on, Bayes theorem was used to determine the cumulative posterior PDF with the The green dashed line is the maximum Z 2 value at max = -0.49s/GeV. likelihood PDF derived from MC simulations shown in figure 5. It is assumed that the shape of the likelihood PDFs is independent on the value of used in the simulations. With this method, 95% confidence limits for of -1.2 s/GeV and 1.1 s/GeV were derived for the lower and upper limits, respectively. The Crab pulsar is located 2 kpc away, giving a sub-luminal limit of the linear energy scale of E QG1 >1.910 17 GeV and a super-luminal limit of E QG1 >1.710 17 GeV. 
Conclusions This work presented two very different methods for measuring LIV from the Crab Pulsar which both yielded similar limits. The limits obtained from the peak timing differences here are comparable to limits found from MAGIC with Mrk 501 data, an order of magnitude below limits found with AGN from HESS and less than two orders of magnitude below the Planck mass scale. The dispersion cancellation method showed a possible hint of towards the super-luminal case. It should be noted that the bounds determined by the dispersion cancellation are likely to have large errors associated with them, due to a small number of statistics in the tails in the probability distribution in figure 5. The method discussed here can be improved in the future by using greater than one thousand trials for the probability distribution. While the Crab Pulsar does not currently have the best sensitivity to LIV measurements, there is still merit to the result. Some postulate that LIV effects would not be isotropic, so multiple results from different targets could constrain the anisotropy of the effect. Finally, it is important to get measurements at multiple distances to completely eliminate any sort of intrinsic effect, since they would not be dependent on redshift. It is possible that the Crab is not unique as a VHE emitting pulsar, and the methods mentioned here could certaintly be used for any pulsar. Hypothetically, if a millisecond pulsar was discovered at VHE energies, it would have ∼10 times the frequency, and therefore ∼10 times the LIV sensitivity to the linear term. As mentioned earlier with more observing time this limit 33ND could be greatly improved. A small improvement could be gained by adding Fermi-LAT data to the DisCan method, although the method is more sensitive at higher energies. 
Additional data will improve the signal-to-noise of the pulse profile which will improve the limits, as well as extend the spectrum of the pulsar to higher energies if the power-law trend continues. Additionally, re-analysis of the data using the DisCan method with different cuts that provide better energy resolution could improve the limit by as much as a factor of ∼2. |
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef THIRD_PARTY_BLINK_RENDERER_MODULES_MEDIA_CONTROLS_ELEMENTS_MEDIA_CONTROL_LOADING_PANEL_ELEMENT_H_
#define THIRD_PARTY_BLINK_RENDERER_MODULES_MEDIA_CONTROLS_ELEMENTS_MEDIA_CONTROL_LOADING_PANEL_ELEMENT_H_
#include "third_party/blink/renderer/modules/media_controls/elements/media_control_animation_event_listener.h"
#include "third_party/blink/renderer/modules/media_controls/elements/media_control_div_element.h"
#include "third_party/blink/renderer/modules/modules_export.h"
namespace blink {
class ContainerNode;
class Element;
class HTMLDivElement;
class MediaControlsImpl;
// The loading panel shows the semi-transparent white mask and the transparent
// loading spinner.
class MODULES_EXPORT MediaControlLoadingPanelElement final
    : public MediaControlDivElement,
      public MediaControlAnimationEventListener::Observer {
  USING_GARBAGE_COLLECTED_MIXIN(MediaControlLoadingPanelElement);

 public:
  explicit MediaControlLoadingPanelElement(MediaControlsImpl&);

  // Update the state based on the Media Controls state.
  void UpdateDisplayState();

  // Inform the loading panel that the Media Controls have been hidden.
  void OnControlsHidden();

  // Inform the loading panel that the Media Controls have been shown.
  void OnControlsShown();

  void Trace(Visitor*) const override;

 private:
  friend class MediaControlLoadingPanelElementTest;

  // Internal animation state machine of the panel.
  enum State {
    // The loading panel is hidden.
    kHidden,

    // The loading panel is shown and is playing the "spinner" animation.
    kPlaying,

    // The loading panel is playing the "cooldown" animation and will hide
    // automatically once it is complete.
    kCoolingDown,
  };

  // These are used by AnimationEventListener to notify of animation events.
  void OnAnimationEnd() override;
  void OnAnimationIteration() override;
  Element& WatchedAnimationElement() const override;

  // Hide the animation and clean up the shadow DOM.
  void HideAnimation();

  // This sets the "animation-iteration-count" CSS property on the mask
  // background elements.
  void SetAnimationIterationCount(const String&);

  // The loading panel is only used once and has a lot of DOM elements so these
  // two functions will populate the shadow DOM or clean it if the panel is
  // hidden.
  void CleanupShadowDOM();
  void PopulateShadowDOM();

  // Cleans up the event listener when this element is removed from the DOM.
  void RemovedFrom(ContainerNode&) override;

  // This counts how many animation iterations the background elements have
  // played.
  int animation_count_ = 0;

  // Current state of the internal animation state machine.
  State state_ = State::kHidden;

  // Whether the Media Controls are hidden.
  bool controls_hidden_ = false;

  // Forwards animation events from the watched element to this observer.
  Member<MediaControlAnimationEventListener> event_listener_;

  // The two mask background <div> elements that play the animations.
  Member<HTMLDivElement> mask1_background_;
  Member<HTMLDivElement> mask2_background_;
};
} // namespace blink
#endif // THIRD_PARTY_BLINK_RENDERER_MODULES_MEDIA_CONTROLS_ELEMENTS_MEDIA_CONTROL_LOADING_PANEL_ELEMENT_H_
|
The present invention is directed, in general, to electronics devices and, more specifically, to an electronics device having a conductive trace aligned with a thinned portion of a substrate, a method of manufacture therefor, and a system including the same.
High speed communications continue to present ever-increasing demands for ultra-high frequency electronics. Accordingly, as semiconductor and sub-micron lithography technologies advance, more and more high performance chips have been designed with the intent to meet these demands. Such high performance chips may include ultra-wide bandwidth electro-optic (EO) and electroabsorption (EA) modulators. However, a major bottleneck for large scale commercial production of ultra-high frequency electronic components exists as the ultra-wide bandwidth device packaging technology becomes more and more demanding.
Ultra-wide bandwidth devices employ a planar circuit structure, thereby capitalizing on continued advancements in integrated circuit design and fabrication technology. Integrated circuit dimensions may be on the order of about 0.1 microns to about 10 microns. However, signal sources often arrive at the planar circuit structure via coaxial cable, which may have a diameter between about 0.2 millimeters and about 1 millimeter. The signal source, therefore, requires an electrical connector between the coaxial signal cable and the planar circuit. A coax-to-planar circuit transition is needed to couple the signal to the device circuit. In order to accommodate this transition, bonding pads, circuit bends, and tapered circuit sections are required. However, as the operation bandwidth increases, large coupling loss occurs at certain frequencies due to substrate mode coupling. That is, the input signals couple to the substrate instead of coupling to the desired circuit on the substrate.
Theoretical analysis shows that substrate mode coupling occurs when signal frequency reaches a threshold value. This threshold value is inversely proportional to the substrate thickness. An ultra-wide bandwidth device requires this threshold frequency value to be as high as possible, so that signals having frequencies beneath the threshold value do not couple to the substrate instead of the desired circuit. Therefore, in device design, it is important to push this coupling threshold frequency out of the desired bandwidth of the signal.
Based on the inverse proportionality relationship between the threshold frequency and the substrate thickness, signal coupling to the substrate at these frequency modes can be eliminated or significantly reduced by decreasing the substrate thickness or by reducing the bonding pad dimensions. For example, an ultra-wide bandwidth lithium niobate modulator requires a substrate thickness less than 0.25 millimeters. However, decreasing substrate thickness or bond pad width has drawbacks in large-scale production. For instance, thin substrates are very difficult to handle and are very fragile, thereby increasing per unit cost and decreasing profitability and component reliability. In addition, small bond pads have large mismatch with coax connectors, demand exacting accuracy and critical tolerances during fabrication and assembly, and increase labor and capital requirements, which also increase costs and decrease profits and component reliability.
Accordingly, what is needed in the art is an electronics device and method of manufacture therefor that avoids the disadvantages associated with the prior art.
To address the above-discussed deficiencies of the prior art, the present invention provides an electronics device, a method of manufacture therefor, and a system including the same. The electronics device includes a substrate that has first and second opposing surfaces and first and second thicknesses, wherein the second thickness is less than the first thickness. The electronics device further includes a conductive trace having an input end and an output end and located over the first surface of the substrate, wherein at least one of the input end or output end is aligned with the second substrate thickness.
The foregoing has outlined preferred and alternative features of the present invention so that those skilled in the art may better understand the detailed description of the invention that follows. Additional features of the invention will be described hereinafter that form the subject of the claims of the invention. Those skilled in the art should appreciate that they can readily use the disclosed conception and specific embodiment as a basis for designing or modifying other structures for carrying out the same purposes of the present invention. Those skilled in the art should also realize that such equivalent constructions do not depart from the spirit and scope of the invention. |
/**
* Lors du placement Detecte les cases qui sont disponibles
*/
public boolean casePossible(Case[] c) {
if (c.length == 1) {
}
return true;
} |
<gh_stars>1-10
package edu.virginia.vcgr.genii.security.credentials;
import java.io.Externalizable;
import java.security.cert.X509Certificate;
import java.util.Date;
import edu.virginia.vcgr.genii.security.Describable;
import edu.virginia.vcgr.genii.security.faults.AttributeInvalidException;
/**
* A GenesisII credential object.
*
* @author dmerrill
* @author ckoeritz
*/
public interface NuCredential extends Externalizable, Describable
{
	/**
	 * checks that the attributes are time-valid with respect to the supplied date and that any delegation depth limits are met by the
	 * credential. this will not check relevant signatures on this object unless they have not previously been checked (if they are good, that
	 * is recorded to avoid subsequent checks).
	 *
	 * @param date the point in time against which time-validity is checked
	 * @throws AttributeInvalidException if the credential is not valid at {@code date} or a delegation depth limit is violated
	 */
	public void checkValidity(Date date) throws AttributeInvalidException;

	/**
	 * Returns the identity of the original credential asserter.
	 *
	 * @return the X.509 certificate chain of the original asserter
	 */
	public X509Certificate[] getOriginalAsserter();
}
|
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
/**
* @summary Exchange an AAD access token of a Teams user for a new Communication Identity access token.
*/
import {
CommunicationAccessToken,
CommunicationIdentityClient
} from "@azure/communication-identity";
import { UsernamePasswordCredential } from "@azure/identity";
// Load the .env file if it exists
import * as dotenv from "dotenv";
dotenv.config();
// You will need to set these environment variables or edit the following values
// Connection string of the Azure Communication Services resource.
const connectionString =
  process.env["COMMUNICATION_CONNECTION_STRING"] || "<communication service connection string>";
// Azure Active Directory tenant the Teams user belongs to.
const aadTenant =
  process.env["COMMUNICATION_M365_AAD_TENANT"] || "<azure active directory tenant id>";
// Client (application) id of the AAD application used for authentication.
const aadAppId = process.env["COMMUNICATION_M365_APP_ID"] || "<azure active directory app id>";
// Scope requested for the AAD token.
const aadScope = process.env["COMMUNICATION_M365_SCOPE"] || "<azure active directory scope>";
// Credentials of the Teams user (username/password flow).
const msalUsername = process.env["COMMUNICATION_MSAL_USERNAME"] || "<msal username>";
const msalPassword = process.env["COMMUNICATION_MSAL_PASSWORD"] || "<<PASSWORD>>";
/**
 * Exchanges an AAD access token of a Teams user for a Communication
 * Identity access token.  Network-bound sample: acquires the AAD token
 * via username/password, then calls getTokenForTeamsUser.
 */
export async function main() {
  // Allow CI to skip this integration sample.
  if (process.env["SKIP_INT_IDENTITY_EXCHANGE_TOKEN_TEST"] === "true") {
    console.log("Skipping the Get Access Token for Teams User sample");
    return;
  }
  console.log("\n== Get Access Token for Teams User sample ==\n");

  const identityClient = new CommunicationIdentityClient(connectionString);

  // Get an AAD token of a Teams user
  console.log("Getting an AAD token of a Teams user");
  const aadCredential = new UsernamePasswordCredential(
    aadTenant,
    aadAppId,
    msalUsername,
    msalPassword
  );
  const aadToken = await aadCredential.getToken([aadScope]);
  console.log(`Retrieved a token with the expiration: ${aadToken.expiresOnTimestamp}`);

  // Exchange the AAD access token of a Teams user for a new Communication Identity access token
  console.log("Exchanging the AAD access token for a Communication access token");
  const communicationAccessToken: CommunicationAccessToken = await identityClient.getTokenForTeamsUser(
    aadToken!.token
  );
  console.log(`Exchanged Communication access token: ${communicationAccessToken.token}`);
}
// Run the sample; on failure dump the HTTP request/response details (if any)
// followed by the raw error.
main().catch((error) => {
  console.error("Encountered an error while exchanging token: ");
  console.error("Request: \n", error.request);
  console.error("\nResponse: \n", error.response);
  console.error(error);
});
|
<reponame>tsommerfeld/L2-methods_for_resonances<filename>Python_libs/.ipynb_checkpoints/jolanta-checkpoint.py
import numpy as np
import scipy.special
"""
Collects functions defining and evaluating the Jolanta model potential
"""
"""----------------------------------------------------------
Functions for explicit evaluation used in with DVRs
"""
def Jolanta_1D(x, a=0.2, b=0.0, c=0.14):
    """
    Evaluate the 1D Jolanta model potential V(x) = (a*x**2 - b)*exp(-c*x**2).

    With the default parameters:
      bound state: -12.26336 eV
      resonance:   (3.279526396 - 0.2079713j) eV
    Works element-wise on scalars and numpy arrays.
    """
    xsq = x * x
    return np.exp(-c * xsq) * (a * xsq - b)
def Jolanta_1Db(x, param):
    """
    Same potential as Jolanta_1D, but with the parameters (a, b, c)
    packed into the tuple `param`.
    """
    a, b, c = param
    xsq = x * x
    return (a * xsq - b) * np.exp(-c * xsq)
def Jolanta_3D(r, param, l=1, mu=1):
    """
    Radial Jolanta potential plus the centrifugal term 0.5*l*(l+1)/(mu*r**2).

    With param=(0.028, 1.0, 0.028), l=1 and mu=1 this gives
      Ebound in eV = -7.17051, and
      Eres   in eV = (3.1729420714 - 0.160845j)
    """
    a, b, c = param
    rsq = r * r
    centrifugal = 0.5 * l * (l + 1) / rsq / mu
    return (a * rsq - b) * np.exp(-c * rsq) + centrifugal
def Jolanta_3D_old(r, a=0.1, b=1.2, c=0.1, l=1, as_parts=False):
    """
    Older default 3D potential; has a resonance at 1.75 eV - 0.2i eV.
    Intended for use with DVRs.

    If as_parts is True, the three contributions (attractive Gaussian
    barrier term, well depth term, centrifugal term) are returned
    separately as a tuple instead of their sum.
    """
    gauss = np.exp(-c * r**2)
    centrifugal = 0.5 * l * (l + 1) / r**2
    if as_parts:
        return (a * r**2 * gauss, b * gauss, centrifugal)
    return (a * r**2 - b) * gauss + centrifugal
"""----------------------------------------------------------------
Representations in a Gaussian basis set
1D = one-dimensional = straightforward
3D = three-dimensional
- add an l=1 angular momentum term
- solve the radial Schroedinger equation
- careful with the integrals for u = R*r
- R are p-GTOs, so u are d-GTOs
"""
def Jolanta_1D_Norm(a, l=1):
    """
    Normalization constant 1/sqrt(S_aa) for a 1D GTO x**l * exp(-a*x**2).

    Closed forms (see the analytic-integrals notebook in the Stab dir):
      l=0 (S-type): (2*a/pi)**(1/4)
      l=1 (P-type): 2 * (2/pi)**(1/4) * a**(3/4)
    """
    if l == 0:
        return (2.0 * a / np.pi) ** 0.25
    return 2.0 * (2.0 / np.pi) ** 0.25 * a ** 0.75
def Jolanta_3D_PNorm(a):
    """
    Normalization 1/sqrt(S_aa) for a radial p-GTO r*exp(-a*r**2) with the
    volume element r**2 dr (u = r*R is then a d-type function):
      4 * 2**(3/4) * sqrt(3) * a**(5/4) / (3 * pi**(1/4))
    """
    prefactor = 4.0 * 2.0 ** 0.75 * np.sqrt(3.0) / (3.0 * np.pi ** 0.25)
    return prefactor * a ** 1.25
def Jolanta_1D_GTO(a1, a2, param, l=1):
    """
    Analytic integrals between two unnormalized 1D GTOs x**l * exp(-a_j*x**2)
    over the Jolanta potential defined by param = (a, b, c).

    Returns (S, T, V): overlap, kinetic energy, and potential.
    """
    a, b, c = param
    rt_pi = np.sqrt(np.pi)
    p = a1 + a2       # Gaussian product exponent
    q = p + c         # exponent including the potential's Gaussian
    if l == 0:
        S = rt_pi / np.sqrt(p)
        T = rt_pi * a1 * a2 / p ** 1.5
        V = rt_pi * (a - 2.0 * b * q) / (2.0 * q ** 1.5)
    else:
        S = rt_pi / (2.0 * p ** 1.5)
        T = 1.5 * rt_pi * a1 * a2 / p ** 2.5
        V = rt_pi * (3.0 * a - 2.0 * b * q) / (4.0 * q ** 2.5)
    return S, T, V
def Jolanta_3D_GTO(a1, a2, param, l=0):
    """
    Analytic integrals between two radial p-GTOs (u = r*R are d-functions)
    over the 3D Jolanta potential param = (a, b, c) plus the l=1
    centrifugal term.

    The `l` argument is ignored; it only exists so that 1D and 3D
    integral routines share a calling convention.

    Returns (S, T, V): overlap, kinetic energy, and potential
    (Jolanta part + centrifugal part).
    """
    a, b, c = param
    rt_pi = np.sqrt(np.pi)
    p = a1 + a2
    q = p + c
    S = 3.0 * rt_pi / (8.0 * p ** 2.5)
    T = rt_pi * (1.875 * a1 * a2 - 0.25 * p ** 2) / p ** 3.5
    V_jolanta = 3.0 * rt_pi * (5.0 * a - 2.0 * b * q) / (16.0 * q ** 3.5)
    V_centrifugal = rt_pi / (4.0 * p ** 1.5)
    return S, T, V_jolanta + V_centrifugal
def Jolanta_3D_W_old(a1, a2, rc):
    """
    CAP matrix element of w(r) = (r - rc)**2 for r > rc (zero otherwise)
    between two radial p-GTOs u(r) = r*R(r) with exponents a1, a2,
    integrated from rc to infinity.

    Fix over the original implementation: the original multiplied terms
    containing exp(+rc**2*(a1+a2)) by an overall exp(-rc**2*(a1+a2)).
    Those factors cancel analytically, but numerically the positive
    exponential overflows for large exponents or cutoffs.  Here the
    cancellation is done analytically, which is overflow-safe and
    algebraically identical:

      W = ( 6*sqrt(pi)*p*rc**2*sqrt(p)*(1-erf(rc*sqrt(p)))
            - 2*p*rc*exp(-p*rc**2)
            + 15*sqrt(pi)*sqrt(p)*(1-erf(rc*sqrt(p))) ) / (16*p**4)
      with p = a1 + a2.
    """
    sp = np.sqrt(np.pi)
    p = a1 + a2
    s12 = np.sqrt(p)
    gauss = np.exp(-rc**2 * p)                      # exp(-rc^2 (a1+a2))
    one_m_erf = 1.0 - scipy.special.erf(rc * s12)   # complementary error fn
    W = (6.0 * sp * p * rc**2 * s12 * one_m_erf
         - 2.0 * p * rc * gauss
         + 15.0 * sp * s12 * one_m_erf) / (16.0 * p**4)
    return W
def Jolanta_3D_W(a, rc):
    """
    Integral int_rc^oo dr r**4 * exp(-a*r**2) * (r - rc)**2, i.e. the
    matrix element of the CAP w(r) = (r-rc)**2 (r > rc; zero otherwise)
    between radial p-GTOs u(r) = r*R(r), whose product is
    u1*u2 = r**4 * exp(-(a1+a2)*r**2) with a = a1 + a2.

    Closed form:
      W = -rc*exp(-a*rc**2)/(8*a**3)
          + 3*sqrt(pi)*rc**2*(1-erf(sqrt(a)*rc))/(8*a**(5/2))
          + 15*sqrt(pi)*(1-erf(sqrt(a)*rc))/(16*a**(7/2))
    """
    rt_pi = np.sqrt(np.pi)
    gauss = np.exp(-a * rc * rc)
    one_m_erf = 1.0 - scipy.special.erf(rc * np.sqrt(a))
    term_gauss = -rc * gauss / (8.0 * a**3)
    term_quad = 3.0 * rt_pi * rc**2 * one_m_erf / (8.0 * a**2.5)
    term_const = 15.0 * rt_pi * one_m_erf / (16.0 * a**3.5)
    return term_gauss + term_quad + term_const
def Jolanta_GTO_H(GTO_fn, alphas, Ns, param, l=1):
    """
    Build the Hamiltonian matrices in a (normalized) GTO basis set.

    Parameters
    ----------
    GTO_fn : integral routine, either Jolanta_1D_GTO or Jolanta_3D_GTO
    alphas : np.array of GTO exponents
    Ns     : np.array of normalization constants
    param  : (a, b, c) parameters of the Jolanta potential
    l      : 0 (even) or 1 (odd) in the 1D case; ignored in 3D

    Returns
    -------
    S, T, V : overlap, kinetic-energy, and potential matrices (symmetric)
    """
    nbas = len(alphas)
    S = np.zeros((nbas, nbas))
    T = np.zeros((nbas, nbas))
    V = np.zeros((nbas, nbas))
    # Fill the lower triangle (incl. diagonal) and mirror to the upper.
    for i in range(nbas):
        for j in range(i + 1):
            Sij, Tij, Vij = GTO_fn(alphas[i], alphas[j], param, l=l)
            scale = Ns[i] * Ns[j]
            S[i, j] = S[j, i] = scale * Sij
            T[i, j] = T[j, i] = scale * Tij
            V[i, j] = V[j, i] = scale * Vij
    return S, T, V
def Jolanta_GTO_W(alphas, Ns, rc):
    """
    Matrix representation of the CAP w(r) in a normalized GTO basis set.

    Parameters
    ----------
    alphas : np.array of GTO exponents
    Ns     : np.array of normalization constants
    rc     : cutoff radius of w(r)

    Returns
    -------
    W : symmetric matrix representation of w(r)
    """
    nbas = len(alphas)
    W = np.zeros((nbas, nbas))
    # Fill the lower triangle (incl. diagonal) and mirror to the upper.
    for i in range(nbas):
        for j in range(i + 1):
            Wij = Ns[i] * Ns[j] * Jolanta_3D_W(alphas[i] + alphas[j], rc)
            W[i, j] = W[j, i] = Wij
    return W
def Eval_GTO_wf(alphas, Ns, cs, xs, l=1):
    """
    Evaluate a 1D wavefunction expanded in normalized GTOs.

    alphas, Ns define the basis, cs are the expansion coefficients;
    the wavefunction is evaluated at each position in xs and an array
    of the same length is returned.
    """
    ys = np.zeros(len(xs))
    for i, x in enumerate(xs):
        xsq = x * x
        acc = 0.0
        for ck, Nk, ak in zip(cs, Ns, alphas):
            acc += ck * Nk * np.exp(-ak * xsq)
        ys[i] = acc * x ** l
    return ys
def Eval_GTO_wf_3D(alphas, Ns, cs, xs, u=True):
    """
    Evaluate the 3D (l=1) radial wavefunction expanded in normalized GTOs:
      u(r) = r**2 * exp(-a*r**2)   (u=True,  radial function u = r*R)
      R(r) = r    * exp(-a*r**2)   (u=False, radial function R = u/r)

    alphas, Ns define the basis, cs are the expansion coefficients;
    the function is evaluated at each position in xs.
    """
    # u(r) carries an extra power of r compared to R(r).
    power = 2 if u else 1
    ys = np.zeros(len(xs))
    for i, x in enumerate(xs):
        xsq = x * x
        acc = 0.0
        for ck, Nk, ak in zip(cs, Ns, alphas):
            acc += ck * Nk * np.exp(-ak * xsq)
        ys[i] = acc * x ** power
    return ys
|
/**
* Stores a given JAXB annotated object to the given {@link OutputStream} using the given character encoding
*
* @see #storeObjectAsXML(Object, OutputStream)
* @param object
* @param outputStream
* @param encoding
*/
public static void storeObjectAsXML( Object object, OutputStream outputStream, String encoding )
{
final ExceptionHandler exceptionHandler = null;
storeObjectAsXML( object, outputStream, encoding, exceptionHandler );
} |
import re
from .common import Common
from resources.interfaces.reddit_instance import RedditInstance
class RedditHandler(Common):
    """Handler that resolves a reddit "comments" URL to its PRAW submission.

    NOTE(review): relies on the project-local Common base class and the
    RedditInstance singleton; their contracts are not visible here.
    """

    # Matches any URL containing "/comments/<submission_id>/"; the named
    # group captures the submission id.
    # Fix: the original character class was [a-zA-z0-9] -- the "A-z" range
    # accidentally also matched "[", "\\", "]", "^", "_" and "`".
    valid_url = r'(.)*/comments/(?P<submission_id>[a-zA-Z0-9]+)/(.)*$'

    def __init__(self, link, name, template_data):
        super().__init__(link, name, template_data)

    def save(self):
        """Return the submission referenced by self.link, or False when the
        link is not a comments URL."""
        reddit = RedditInstance().reddit
        match = re.match(self.valid_url, self.link)
        if not match:
            return False
        self.logger.debug("Fetching submission data")
        submission = reddit.submission(id=match.group("submission_id"))
        return submission

    def sanitize_url(self):
        # Intentionally a no-op: reddit links need no sanitizing here.
        pass
|
<reponame>Lycheus/GLSL-ATTILA
/**************************************************************************
*
* Copyright (c) 2002 - 2011 by Computer Architecture Department,
* Universitat Politecnica de Catalunya.
* All rights reserved.
*
* The contents of this file may not be disclosed to third parties,
* copied or duplicated in any form, in whole or in part, without the
* prior permission of the authors, Computer Architecture Department
* and Universitat Politecnica de Catalunya.
*
* Fragment Input definition file.
*
*/
/**
*
* @file FragmentInput.cpp
*
* This file implements the Fragment Input class.
*
* This class carries fragment data between Triangle Traversal,
* the Interpolator, the fragment FIFO and the Pixel Shaders.
*
*/
#include "FragmentInput.h"
#include <stdio.h>
using namespace gpu3d;
/*  Creates a new FragmentInput.
 *
 *  Carries one fragment's data between Triangle Traversal, the
 *  Interpolator, the fragment FIFO and the Pixel Shaders.
 */
FragmentInput::FragmentInput(u32bit ID, u32bit setupID, Fragment *fragment, TileIdentifier id, u32bit stampUnitID):
    tileID(id)
{
    /*  Set fragment parameters.  */
    triangleID = ID;
    setupTriangle = setupID;
    fr = fragment;
    stampUnit = stampUnitID;
    /*  NOTE(review): the original re-assigned "tileID = id;" here although
        tileID is already set in the initializer list; the redundant
        assignment has been removed (no behavior change).  */

    /*  Write fragment information (debug string attached to this object).  */
    if (fr != NULL)
        sprintf((char *) getInfo(), "SU:%d %d, %d", stampUnitID, fr->getX(), fr->getY());
    else
        sprintf((char *) getInfo(), "SU:%d last fragment", stampUnitID);

    /*  Mark the fragment as not culled.  */
    culled = FALSE;

    /*  The fragment has no interpolated attributes yet.  */
    attributes = NULL;

    setTag("FrIn");
}
/*  Gets the fragment triangle identifier.  */
u32bit FragmentInput::getTriangleID() const
{
    return triangleID;
}

/*  Gets the fragment setup triangle identifier.  */
u32bit FragmentInput::getSetupTriangle() const
{
    return setupTriangle;
}

/*  Gets the fragment interpolated attributes (NULL until set).  */
QuadFloat *FragmentInput::getAttributes() const
{
    return attributes;
}

/*  Sets the fragment interpolated attributes array.
    The array is stored by pointer; ownership is not documented here --
    presumably managed by the caller (TODO confirm).  */
void FragmentInput::setAttributes(QuadFloat *attrib)
{
    attributes = attrib;
}

/*  Gets the fragment input Fragment object.  */
Fragment *FragmentInput::getFragment() const
{
    return fr;
}

/*  Sets the fragment new cull flag value.  */
void FragmentInput::setCull(bool cull)
{
    /*  Set the cull flag.  */
    culled = cull;
}

/*  Returns the fragment cull flag.  */
bool FragmentInput::isCulled() const
{
    return culled;
}

/*  Returns the fragment assigned stamp unit.  */
u32bit FragmentInput::getStampUnit() const
{
    return stampUnit;
}

/*  Sets the fragment start cycle.  */
void FragmentInput::setStartCycle(u64bit cycle)
{
    startCycle = cycle;
}

/*  Returns the fragment start cycle.  */
u64bit FragmentInput::getStartCycle() const
{
    return startCycle;
}

/*  Sets the fragment start cycle in the shader.  */
void FragmentInput::setStartCycleShader(u64bit cycle)
{
    startCycleShader = cycle;
}

/*  Returns the fragment start cycle in the shader unit.  */
u64bit FragmentInput::getStartCycleShader() const
{
    return startCycleShader;
}

/*  Sets the number of cycles the fragment spent in the shader.  */
void FragmentInput::setShaderLatency(u32bit latency)
{
    shaderLatency = latency;
}

/*  Returns the number of cycles the fragment spent inside the shader unit.  */
u32bit FragmentInput::getShaderLatency() const
{
    return shaderLatency;
}

/*  Sets the fragment tile identifier.  */
void FragmentInput::setTileID(TileIdentifier id)
{
    tileID = id;
}

/*  Gets the fragment tile identifier.  */
TileIdentifier FragmentInput::getTileID() const
{
    return tileID;
}

/*  Sets the shader unit to which the fragment is assigned.  */
void FragmentInput::setShaderUnit(u32bit unit)
{
    shaderUnit = unit;
}

/*  Gets the shader unit to which the fragment was assigned.  */
u32bit FragmentInput::getShaderUnit() const
{
    return shaderUnit;
}
|
Various spray coatings have been proposed for use in reducing the temperature rise associated with sun light exposure on metallic and non-metallic substrates. One such coating is disclosed in U.S. Pat. No. 4,546,045 to Elias, which issued Oct. 8, 1985, and involves the application of a paint composition comprised of a film forming polymer and an infrared reflective pigment to a polyvinyl chloride (PVC) substrate profile. Following the application of the coating composition, the substrate profile is baked at 130° F. for approximately twenty minutes to ensure proper coating adhesion to the substrate profile.
A difficulty with conventional coating techniques exists in that depending upon the substrate or profile to be coated, it may not be economically or physically possible to bake the painted PVC profile to provide the required curing for proper adhesion of the thermally reflective paint. In particular, often it is desirable to provide coatings to PVC profiles which are used in the formation of architectural components such as pilasters, windows and doors. Such profiles are manufactured off site and are coated as elongated PVC or other plastic extrusions prior to final door or window assembly. Similarly, it is envisioned that various plastic profiles used in the automotive industry to form bumpers, fenders and other auto parts may be too large or cumbersome to heat cure and achieve the necessary adhesion of the thermally reflective paint. |
package com.virtuslab.branchlayout.impl.readwrite;
import static com.virtuslab.branchlayout.impl.readwrite.IndentSpec.SPACE;
import static com.virtuslab.branchlayout.impl.readwrite.IndentSpec.TAB;
import java.nio.file.Files;
import java.nio.file.Path;
import io.vavr.collection.List;
import io.vavr.collection.Stream;
import io.vavr.control.Try;
import lombok.CustomLog;
import lombok.val;
import org.checkerframework.checker.index.qual.NonNegative;
import org.checkerframework.checker.index.qual.Positive;
import com.virtuslab.branchlayout.api.BranchLayoutException;
@CustomLog
public final class BranchLayoutFileUtils {
  // Utility class; never instantiated.
  private BranchLayoutFileUtils() {}

  public static final @Positive int DEFAULT_INDENT_WIDTH = 2;
  public static final char DEFAULT_INDENT_CHARACTER = SPACE;
  private static final IndentSpec DEFAULT_SPEC = new IndentSpec(DEFAULT_INDENT_CHARACTER, DEFAULT_INDENT_WIDTH);

  // Extracted to a method so that it can be mocked in the tests.
  public static IndentSpec getDefaultSpec() {
    return DEFAULT_SPEC;
  }

  /** Returns the number of leading {@code indentCharacter}s in {@code line}. */
  public static @NonNegative int getIndentWidth(String line, char indentCharacter) {
    return Stream.ofAll(line.chars().boxed()).takeWhile(c -> c == indentCharacter).size();
  }

  /**
   * Derives the indent character and width used by the branch layout file at {@code path};
   * falls back to the default spec when the file cannot be read.
   */
  public static IndentSpec deriveIndentSpec(Path path) {
    LOG.debug("Entering: branch layout file path: ${path}");
    List<String> lines = Try.of(() -> BranchLayoutFileUtils.readFileLines(path))
        .getOrElse(() -> {
          LOG.debug(() -> "Failed to read branch layout file from ${path}. Falling back to default indent definition.");
          return List.empty();
        });
    return deriveIndentSpec(lines);
  }

  /**
   * Derives the indent spec from the first non-blank line that starts with a space or a tab;
   * the defaults are used when no such line exists.
   */
  public static IndentSpec deriveIndentSpec(List<String> lines) {
    LOG.debug(() -> "${lines.length()} line(s) found");
    val firstLineWithBlankPrefixOption = lines.reject(line -> line.trim().isEmpty())
        .find(line -> line.startsWith(String.valueOf(SPACE))
            || line.startsWith(String.valueOf(TAB)));
    char indentCharacter = BranchLayoutFileUtils.DEFAULT_INDENT_CHARACTER;
    int indentWidth = BranchLayoutFileUtils.DEFAULT_INDENT_WIDTH;
    // Redundant non-emptiness check to satisfy IndexChecker
    if (firstLineWithBlankPrefixOption.isDefined() && !firstLineWithBlankPrefixOption.get().isEmpty()) {
      indentCharacter = firstLineWithBlankPrefixOption.get().charAt(0);
      indentWidth = BranchLayoutFileUtils.getIndentWidth(firstLineWithBlankPrefixOption.get(), indentCharacter);
      // we are processing a line satisfying `line.startsWith(" ") || line.startsWith("\t")`
      assert indentWidth > 0 : "indent width is ${indentWidth} <= 0";
    }
    IndentSpec indentSpec = new IndentSpec(indentCharacter, indentWidth);
    LOG.debug(() -> "Indent character is ${indentSpec.getIndentCharacter() == '\\t' ? \"TAB\" :" +
        " indentSpec.getIndentCharacter() == ' ' ? \"SPACE\" : \"'\" + indentSpec.getIndentCharacter() + \"'\"}");
    LOG.debug(() -> "Indent width is ${indentSpec.getIndentWidth()}");
    return indentSpec;
  }

  /** Reads all lines of {@code path}, wrapping any failure in a {@link BranchLayoutException}. */
  public static List<String> readFileLines(Path path) throws BranchLayoutException {
    return Try.of(() -> List.ofAll(Files.readAllLines(path))).getOrElseThrow(
        e -> new BranchLayoutException("Error while loading branch layout file (${path.toAbsolutePath()})", e));
  }

  /**
   * Returns {@code false} iff {@code line} begins with the "other" indentation character
   * (tab when spaces are expected and vice versa); {@code true} otherwise.
   */
  public static boolean hasProperIndentationCharacter(String line, char expectedIndentationCharacter) {
    char unexpectedIndentationCharacter = expectedIndentationCharacter == SPACE
        ? TAB
        : SPACE;
    return Stream.ofAll(line.toCharArray())
        .takeWhile(c -> c != expectedIndentationCharacter)
        .headOption()
        .map(c -> c != unexpectedIndentationCharacter)
        .getOrElse(true);
  }
}
|
Legislative Confrontation of Groupthink in US Natural Resource Agencies Within the apparent strength of proud, successful, cohesive agencies are often found characteristics that inhibit their ability to adapt successfully in an environment of change. Organizations can be so successful that they feel invulnerable to public petition. They can stereotype, and dismiss as uninformed outsiders or biased trouble-makers, any others who criticize them. Janis has described such organizational tendencies as groupthink behaviour—where proud and successful professional organizations, such as the USDA-Forest Service (USFS), consider themselves superior to the public, and where any external criticism is filtered, rationalized, and stereotyped to minimize the need for organizational introspection and change. This article looks at how groupthink tendencies were a normal, understandable part of USFS behaviour in the 1950s–60s—and how these tendencies inhibited its adaptation to a post-industrial American society that was increasingly concerned with forest recreational and amenity values. The National Environmental Policy Act was the first of several laws that directly confronted the utilitarian, developmental values and the groupthink tendencies in federal natural resource agencies. Responding to NEPA and other legislative and policy changes of that era, the USFS has latterly become a much more sexually and professionally diverse agency, that is more open to public input than formerly. This volte-face has reduced the probability of groupthink operating in the agency, and consequently improved the latter's prospects of a useful and equitable future.
#### Python 3.5+ required ####
from sys import argv, exit
def print_usage():
    """Print a one-line usage hint for this script."""
    usage = f'USAGE: python {argv[0]} <file>'
    print(usage)
def get_word(line: str) -> str:
    """Return the part of `line` before its first comma (the whole line
    when it contains no comma).  Idiom fix: use str.partition instead of
    a manual find/slice."""
    head, _sep, _rest = line.partition(',')
    return head


def count_before_comma(file_name: str) -> dict:
    """Count the words before each first comma, one word per line.

    Idiom fix: use collections.Counter instead of a manual
    dict.setdefault accumulation (same counts, same insertion order).

    Returns a dict mapping each first word to its number of occurrences.
    """
    with open(file_name) as f:
        return dict(collections.Counter(get_word(line) for line in f))
def pretty_print_answer(answer: dict):
    """Print each word followed by its count, one pair per line."""
    for word in answer:
        print(word, answer[word])
# Script entry point: expects exactly one argument (the input file name);
# otherwise prints usage and exits with status 1.
if __name__ == '__main__':
    if len(argv) != 2:
        print_usage()
        exit(1)
    pretty_print_answer(count_before_comma(argv[1]))
package fst
import (
"fmt"
"github.com/jtejido/golucene/core/util"
"reflect"
)
// FSTStore abstracts the backing storage for an FST's serialized bytes;
// implementations must report their memory usage (util.Accountable).
type FSTStore interface {
	util.Accountable
	// Init loads numBytes bytes of FST data from in.
	Init(in util.DataInput, numBytes int64) error
	// Size returns the number of stored bytes.
	Size() int64
	// ReverseBytesReader returns a reader over the stored bytes in reverse order.
	ReverseBytesReader() BytesReader
	// WriteTo serializes the stored bytes to out.
	WriteTo(out util.DataOutput) error
}
type ByteHeap []byte
func (h ByteHeap) Len() int { return len(h) }
func (h ByteHeap) Less(i, j int) bool { return int(h[i]) < int(h[j]) }
func (h ByteHeap) Swap(i, j int) { h[i], h[j] = h[j], h[i] }
func (h *ByteHeap) Push(x interface{}) {
*h = append(*h, x.(byte))
}
func (h *ByteHeap) Pop() interface{} {
old := *h
n := len(old)
x := old[n-1]
*h = old[0 : n-1]
return x
}
// OnHeapFSTStore keeps the FST bytes on the heap: large FSTs in a paged
// BytesStore, small ones in a single flat slice.  After Init, exactly one
// of bytes/bytesArray is populated.
type OnHeapFSTStore struct {
	bytes        *BytesStore // paged storage, used when the FST spans multiple blocks
	bytesArray   []byte      // flat storage, used when the FST fits in one block
	maxBlockBits int         // log2 of the maximum block size
}
// newOnHeapFSTStore creates an on-heap store whose largest block is
// 1<<maxBlockBits bytes; maxBlockBits must lie in [1, 30], otherwise it
// panics.
func newOnHeapFSTStore(maxBlockBits int) *OnHeapFSTStore {
	if maxBlockBits < 1 || maxBlockBits > 30 {
		panic(fmt.Sprintf("maxBlockBits should be 1 .. 30; got %d", maxBlockBits))
	}
	store := new(OnHeapFSTStore)
	store.maxBlockBits = maxBlockBits
	return store
}
// Init loads numBytes bytes of FST data from in.  FSTs larger than one
// block (1<<maxBlockBits bytes) are read into a paged BytesStore; smaller
// ones are read into a single flat slice for less per-read overhead.
func (onfsts *OnHeapFSTStore) Init(in util.DataInput, numBytes int64) (err error) {
	if numBytes > 1<<uint(onfsts.maxBlockBits) {
		// FST is big: we need multiple pages
		onfsts.bytes, err = newBytesStoreFromInput(in, numBytes, 1<<uint(onfsts.maxBlockBits))
	} else {
		// FST fits into a single block: use ByteArrayBytesStoreReader for less overhead
		onfsts.bytesArray = make([]byte, numBytes)
		err = in.ReadBytes(onfsts.bytesArray)
	}
	return
}
// Size returns the number of bytes held, from whichever representation
// is active.
func (onfsts *OnHeapFSTStore) Size() int64 {
	if onfsts.bytesArray == nil {
		return onfsts.bytes.RamBytesUsed()
	}
	return int64(len(onfsts.bytesArray))
}
// RamBytesUsed reports the memory footprint: the shallow struct size
// plus the payload size reported by Size.
func (onfsts *OnHeapFSTStore) RamBytesUsed() int64 {
	rbu := util.ShallowSizeOfInstance(reflect.TypeOf(OnHeapFSTStore{}))
	return rbu + onfsts.Size()
}
// ReverseBytesReader returns a reader over the stored bytes in reverse
// order, matching whichever representation is active.
func (onfsts *OnHeapFSTStore) ReverseBytesReader() BytesReader {
	if onfsts.bytesArray == nil {
		return onfsts.bytes.reverseReader()
	}
	return newReverseBytesReader(onfsts.bytesArray)
}
// WriteTo serializes the store as a VLong byte count followed by the raw
// bytes, regardless of which internal representation is active.
func (onfsts *OnHeapFSTStore) WriteTo(out util.DataOutput) (err error) {
	if onfsts.bytes != nil {
		numBytes := onfsts.bytes.position()
		if err = out.WriteVLong(numBytes); err == nil {
			err = onfsts.bytes.writeTo(out)
		}
	} else {
		// Init guarantees exactly one representation is populated.
		assert(onfsts.bytesArray != nil)
		if err = out.WriteVLong(int64(len(onfsts.bytesArray))); err == nil {
			err = out.WriteBytes(onfsts.bytesArray)
		}
	}
	return
}
|
/*
WARNING: THIS FILE IS AUTO-GENERATED. DO NOT MODIFY.
This file was generated from ControlData.idl using "rtiddsgen".
The rtiddsgen tool is part of the RTI Connext distribution.
For more information, type 'rtiddsgen -help' at a command shell
or consult the RTI Connext manual.
*/
#include <string.h>
#ifdef __cplusplus
#ifndef ndds_cpp_h
#include "ndds/ndds_cpp.h"
#endif
#else
#ifndef ndds_c_h
#include "ndds/ndds_c.h"
#endif
#endif
#ifndef osapi_type_h
#include "osapi/osapi_type.h"
#endif
#ifndef osapi_heap_h
#include "osapi/osapi_heap.h"
#endif
#ifndef osapi_utility_h
#include "osapi/osapi_utility.h"
#endif
#ifndef cdr_type_h
#include "cdr/cdr_type.h"
#endif
#ifndef cdr_type_object_h
#include "cdr/cdr_typeObject.h"
#endif
#ifndef cdr_encapsulation_h
#include "cdr/cdr_encapsulation.h"
#endif
#ifndef cdr_stream_h
#include "cdr/cdr_stream.h"
#endif
#ifndef pres_typePlugin_h
#include "pres/pres_typePlugin.h"
#endif
#include "ControlDataPlugin.h"
/* ------------------------------------------------------------------------
Enum Type: Action
* ------------------------------------------------------------------------- */
/* ------------------------------------------------------------------------
* (De)Serialization Methods
* ------------------------------------------------------------------------ */
/* Serialize one Action enum sample onto the CDR stream.  When
 * serialize_encapsulation is set, the CDR encapsulation header is written
 * first and stream alignment is reset/restored around the payload.
 * Returns RTI_FALSE on any stream error. */
RTIBool ActionPlugin_serialize(
    PRESTypePluginEndpointData endpoint_data,
    const Action *sample,
    struct RTICdrStream *stream,
    RTIBool serialize_encapsulation,
    RTIEncapsulationId encapsulation_id,
    RTIBool serialize_sample,
    void *endpoint_plugin_qos)
{
    char * position = NULL;
    if (endpoint_data) {} /* To avoid warnings */
    if (endpoint_plugin_qos) {} /* To avoid warnings */
    if(serialize_encapsulation) {
        if (!RTICdrStream_serializeAndSetCdrEncapsulation(stream, encapsulation_id)) {
            return RTI_FALSE;
        }
        position = RTICdrStream_resetAlignment(stream);
    }
    if(serialize_sample) {
        if (!RTICdrStream_serializeEnum(stream, sample))
        {
            return RTI_FALSE;
        }
    }
    if(serialize_encapsulation) {
        RTICdrStream_restoreAlignment(stream,position);
    }
    return RTI_TRUE;
}
/* Deserialize one Action enum from the CDR stream.  The raw wire value is
 * validated against the known enumerators; unknown values fail with
 * RTI_FALSE instead of producing an out-of-range Action. */
RTIBool
ActionPlugin_deserialize_sample(
    PRESTypePluginEndpointData endpoint_data,
    Action *sample,
    struct RTICdrStream *stream,
    RTIBool deserialize_encapsulation,
    RTIBool deserialize_sample,
    void *endpoint_plugin_qos)
{
    char * position = NULL;
    DDS_Enum enum_tmp;
    if (endpoint_data) {} /* To avoid warnings */
    if (endpoint_plugin_qos) {} /* To avoid warnings */
    if(deserialize_encapsulation) {
        if (!RTICdrStream_deserializeAndSetCdrEncapsulation(stream)) {
            return RTI_FALSE;
        }
        position = RTICdrStream_resetAlignment(stream);
    }
    if(deserialize_sample) {
        if (!RTICdrStream_deserializeEnum(stream, &enum_tmp))
        {
            return RTI_FALSE;
        }
        /* Map the wire value onto a known enumerator; reject anything else. */
        switch (enum_tmp) {
            case READY:
                *sample=READY;
                break;
            case START:
                *sample=START;
                break;
            default:
                return RTI_FALSE;
        }
    }
    if(deserialize_encapsulation) {
        RTICdrStream_restoreAlignment(stream,position);
    }
    return RTI_TRUE;
}
/* Advance the stream past one serialized Action (and optionally its
 * encapsulation header) without materializing the value. */
RTIBool ActionPlugin_skip(
    PRESTypePluginEndpointData endpoint_data,
    struct RTICdrStream *stream,
    RTIBool skip_encapsulation,
    RTIBool skip_sample,
    void *endpoint_plugin_qos)
{
    char * position = NULL;
    if (endpoint_data) {} /* To avoid warnings */
    if (endpoint_plugin_qos) {} /* To avoid warnings */
    if(skip_encapsulation) {
        if (!RTICdrStream_skipEncapsulation(stream)) {
            return RTI_FALSE;
        }
        position = RTICdrStream_resetAlignment(stream);
    }
    if(skip_sample) {
        if (!RTICdrStream_skipEnum(stream)) {
            return RTI_FALSE;
        }
    }
    if(skip_encapsulation) {
        RTICdrStream_restoreAlignment(stream,position);
    }
    return RTI_TRUE;
}
/* Worst-case serialized size of one Action (a fixed-size CDR enum),
 * optionally including the encapsulation header.  Returns 1 when the
 * encapsulation id is invalid. */
unsigned int ActionPlugin_get_serialized_sample_max_size(
    PRESTypePluginEndpointData endpoint_data,
    RTIBool include_encapsulation,
    RTIEncapsulationId encapsulation_id,
    unsigned int current_alignment)
{
    unsigned int initial_alignment = current_alignment;
    unsigned int encapsulation_size = current_alignment;
    if (endpoint_data) {} /* To avoid warnings */
    if (include_encapsulation) {
        if (!RTICdrEncapsulation_validEncapsulationId(encapsulation_id)) {
            return 1;
        }
        RTICdrStream_getEncapsulationSize(encapsulation_size);
        encapsulation_size -= current_alignment;
        current_alignment = 0;
        initial_alignment = 0;
    }
    current_alignment += RTICdrType_getEnumMaxSizeSerialized(current_alignment);
    if (include_encapsulation) {
        current_alignment += encapsulation_size;
    }
    return current_alignment - initial_alignment;
}
/* Minimum serialized size: identical to the maximum, since an enum has a
 * fixed wire size (this simply delegates to the max-size routine). */
unsigned int ActionPlugin_get_serialized_sample_min_size(
    PRESTypePluginEndpointData endpoint_data,
    RTIBool include_encapsulation,
    RTIEncapsulationId encapsulation_id,
    unsigned int current_alignment)
{
    unsigned int initial_alignment = current_alignment;
    current_alignment += ActionPlugin_get_serialized_sample_max_size(
        endpoint_data,include_encapsulation,
        encapsulation_id, current_alignment);
    return current_alignment - initial_alignment;
}
/* Serialized size of a concrete sample: same as the maximum for a
 * fixed-size enum, so the sample argument is ignored. */
unsigned int
ActionPlugin_get_serialized_sample_size(
    PRESTypePluginEndpointData endpoint_data,
    RTIBool include_encapsulation,
    RTIEncapsulationId encapsulation_id,
    unsigned int current_alignment,
    const Action * sample)
{
    unsigned int initial_alignment = current_alignment;
    if (sample) {} /* To avoid warnings */
    current_alignment += ActionPlugin_get_serialized_sample_max_size(
        endpoint_data,include_encapsulation,
        encapsulation_id, current_alignment);
    return current_alignment - initial_alignment;
}
/* ------------------------------------------------------------------------
Key Management functions:
* ------------------------------------------------------------------------ */
/* Serialize the key of an Action: the whole sample serves as the key, so
 * this delegates to the full-sample serializer. */
RTIBool ActionPlugin_serialize_key(
    PRESTypePluginEndpointData endpoint_data,
    const Action *sample,
    struct RTICdrStream *stream,
    RTIBool serialize_encapsulation,
    RTIEncapsulationId encapsulation_id,
    RTIBool serialize_key,
    void *endpoint_plugin_qos)
{
    return ActionPlugin_serialize(
        endpoint_data, sample, stream,
        serialize_encapsulation, encapsulation_id,
        serialize_key, endpoint_plugin_qos);
}
/* Deserialize the key of an Action: delegates to the full-sample
 * deserializer. */
RTIBool ActionPlugin_deserialize_key_sample(
    PRESTypePluginEndpointData endpoint_data,
    Action *sample,
    struct RTICdrStream *stream,
    RTIBool deserialize_encapsulation,
    RTIBool deserialize_key,
    void *endpoint_plugin_qos)
{
    return ActionPlugin_deserialize_sample(
        endpoint_data, sample, stream, deserialize_encapsulation,
        deserialize_key, endpoint_plugin_qos);
}
/* Worst-case serialized key size: same as the whole-sample maximum. */
unsigned int ActionPlugin_get_serialized_key_max_size(
    PRESTypePluginEndpointData endpoint_data,
    RTIBool include_encapsulation,
    RTIEncapsulationId encapsulation_id,
    unsigned int current_alignment)
{
    unsigned int initial_alignment = current_alignment;
    current_alignment += ActionPlugin_get_serialized_sample_max_size(
        endpoint_data,include_encapsulation,
        encapsulation_id, current_alignment);
    return current_alignment - initial_alignment;
}
/* Extract the key from a serialized sample: for this enum that is a full
 * deserialize. */
RTIBool
ActionPlugin_serialized_sample_to_key(
    PRESTypePluginEndpointData endpoint_data,
    Action *sample,
    struct RTICdrStream *stream,
    RTIBool deserialize_encapsulation,
    RTIBool deserialize_key,
    void *endpoint_plugin_qos)
{
    return ActionPlugin_deserialize_sample(
        endpoint_data, sample, stream, deserialize_encapsulation,
        deserialize_key, endpoint_plugin_qos);
}
/* ----------------------------------------------------------------------------
Support functions:
* ---------------------------------------------------------------------------- */
/* Debug-print helper: prints an optional description line, then either
 * "NULL" or the enum value, indented one level deeper. */
void ActionPluginSupport_print_data(
    const Action *sample,
    const char *description, int indent_level)
{
    if (description != NULL) {
        RTICdrType_printIndent(indent_level);
        RTILog_debug("%s:\n", description);
    }
    if (sample == NULL) {
        RTICdrType_printIndent(indent_level+1);
        RTILog_debug("NULL\n");
        return;
    }
    RTICdrType_printEnum((RTICdrEnum *)sample, "Action", indent_level + 1);
}
/* --------------------------------------------------------------------------------------
* Type ControlData
* -------------------------------------------------------------------------------------- */
/* --------------------------------------------------------------------------------------
Support functions:
* -------------------------------------------------------------------------------------- */
/* Allocate a ControlData sample on the heap and run its initializer.
 * Returns NULL if allocation or initialization fails (a partially built
 * sample is freed before returning). */
ControlData *
ControlDataPluginSupport_create_data_ex(RTIBool allocate_pointers){
    ControlData *sample = NULL;
    RTIOsapiHeap_allocateStructure(
        &sample, ControlData);
    if(sample != NULL) {
        if (!ControlData_initialize_ex(sample,allocate_pointers, RTI_TRUE)) {
            RTIOsapiHeap_freeStructure(sample);
            return NULL;
        }
    }
    return sample;
}
/* Convenience wrapper: create a sample with pointer members allocated. */
ControlData *
ControlDataPluginSupport_create_data(void)
{
    return ControlDataPluginSupport_create_data_ex(RTI_TRUE);
}
/* Finalize a sample and release its heap storage. */
void
ControlDataPluginSupport_destroy_data_ex(
    ControlData *sample,RTIBool deallocate_pointers) {
    ControlData_finalize_ex(sample,deallocate_pointers);
    RTIOsapiHeap_freeStructure(sample);
}
/* Convenience wrapper: destroy a sample, deallocating pointer members. */
void
ControlDataPluginSupport_destroy_data(
    ControlData *sample) {
    ControlDataPluginSupport_destroy_data_ex(sample,RTI_TRUE);
}
/* Copy src into dst via the generated ControlData_copy. */
RTIBool
ControlDataPluginSupport_copy_data(
    ControlData *dst,
    const ControlData *src)
{
    return ControlData_copy(dst,src);
}
/* Debug-print a sample: optional description line, then each member
 * (appId, then the nested Action). */
void
ControlDataPluginSupport_print_data(
    const ControlData *sample,
    const char *desc,
    unsigned int indent_level)
{
    RTICdrType_printIndent(indent_level);
    if (desc != NULL) {
        RTILog_debug("%s:\n", desc);
    } else {
        RTILog_debug("\n");
    }
    if (sample == NULL) {
        RTILog_debug("NULL\n");
        return;
    }
    RTICdrType_printUnsignedLong(
        &sample->appId, "appId", indent_level + 1);
    ActionPluginSupport_print_data(
        &sample->action, "action", indent_level + 1);
}
/* ----------------------------------------------------------------------------
Callback functions:
* ---------------------------------------------------------------------------- */
/* Called when the type is registered with a participant: allocate the
 * RTI default per-participant plugin data. */
PRESTypePluginParticipantData
ControlDataPlugin_on_participant_attached(
    void *registration_data,
    const struct PRESTypePluginParticipantInfo *participant_info,
    RTIBool top_level_registration,
    void *container_plugin_context,
    RTICdrTypeCode *type_code)
{
    if (registration_data) {} /* To avoid warnings */
    if (participant_info) {} /* To avoid warnings */
    if (top_level_registration) {} /* To avoid warnings */
    if (container_plugin_context) {} /* To avoid warnings */
    if (type_code) {} /* To avoid warnings */
    return PRESTypePluginDefaultParticipantData_new(participant_info);
}
/* Release the per-participant plugin data. */
void
ControlDataPlugin_on_participant_detached(
    PRESTypePluginParticipantData participant_data)
{
    PRESTypePluginDefaultParticipantData_delete(participant_data);
}
/* Create per-endpoint plugin data wired to the sample create/destroy
 * hooks.  Writers additionally get a precomputed maximum serialized
 * sample size and a serialization buffer pool; on any failure the data
 * is deleted and NULL is returned. */
PRESTypePluginEndpointData
ControlDataPlugin_on_endpoint_attached(
    PRESTypePluginParticipantData participant_data,
    const struct PRESTypePluginEndpointInfo *endpoint_info,
    RTIBool top_level_registration,
    void *containerPluginContext)
{
    PRESTypePluginEndpointData epd = NULL;
    unsigned int serializedSampleMaxSize;
    if (top_level_registration) {} /* To avoid warnings */
    if (containerPluginContext) {} /* To avoid warnings */
    epd = PRESTypePluginDefaultEndpointData_new(
        participant_data,
        endpoint_info,
        (PRESTypePluginDefaultEndpointDataCreateSampleFunction)
        ControlDataPluginSupport_create_data,
        (PRESTypePluginDefaultEndpointDataDestroySampleFunction)
        ControlDataPluginSupport_destroy_data,
        NULL, NULL);
    if (epd == NULL) {
        return NULL;
    }
    if (endpoint_info->endpointKind == PRES_TYPEPLUGIN_ENDPOINT_WRITER) {
        serializedSampleMaxSize = ControlDataPlugin_get_serialized_sample_max_size(
            epd,RTI_FALSE,RTI_CDR_ENCAPSULATION_ID_CDR_BE,0);
        PRESTypePluginDefaultEndpointData_setMaxSizeSerializedSample(epd, serializedSampleMaxSize);
        if (PRESTypePluginDefaultEndpointData_createWriterPool(
            epd,
            endpoint_info,
            (PRESTypePluginGetSerializedSampleMaxSizeFunction)
            ControlDataPlugin_get_serialized_sample_max_size, epd,
            (PRESTypePluginGetSerializedSampleSizeFunction)
            ControlDataPlugin_get_serialized_sample_size,
            epd) == RTI_FALSE) {
            PRESTypePluginDefaultEndpointData_delete(epd);
            return NULL;
        }
    }
    return epd;
}
/* Release the per-endpoint plugin data. */
void
ControlDataPlugin_on_endpoint_detached(
    PRESTypePluginEndpointData endpoint_data)
{
    PRESTypePluginDefaultEndpointData_delete(endpoint_data);
}
/* vtable hook: copy one sample (delegates to the support copy). */
RTIBool
ControlDataPlugin_copy_sample(
    PRESTypePluginEndpointData endpoint_data,
    ControlData *dst,
    const ControlData *src)
{
    if (endpoint_data) {} /* To avoid warnings */
    return ControlDataPluginSupport_copy_data(dst,src);
}
/* --------------------------------------------------------------------------------------
(De)Serialize functions:
* -------------------------------------------------------------------------------------- */
/* Prototype for the size routine defined further below; it is used by
 * the endpoint-attach callback above (presumably also declared in
 * ControlDataPlugin.h — TODO confirm against the generated header). */
unsigned int
ControlDataPlugin_get_serialized_sample_max_size(
    PRESTypePluginEndpointData endpoint_data,
    RTIBool include_encapsulation,
    RTIEncapsulationId encapsulation_id,
    unsigned int current_alignment);
/* Serialize a ControlData sample in member-declaration order: appId
 * (unsigned long) followed by action (Action enum).  The nested member
 * is written without its own encapsulation header (RTI_FALSE) — only the
 * top level writes one. */
RTIBool
ControlDataPlugin_serialize(
    PRESTypePluginEndpointData endpoint_data,
    const ControlData *sample,
    struct RTICdrStream *stream,
    RTIBool serialize_encapsulation,
    RTIEncapsulationId encapsulation_id,
    RTIBool serialize_sample,
    void *endpoint_plugin_qos)
{
    char * position = NULL;
    RTIBool retval = RTI_TRUE;
    if (endpoint_data) {} /* To avoid warnings */
    if (endpoint_plugin_qos) {} /* To avoid warnings */
    if(serialize_encapsulation) {
        if (!RTICdrStream_serializeAndSetCdrEncapsulation(stream, encapsulation_id)) {
            return RTI_FALSE;
        }
        position = RTICdrStream_resetAlignment(stream);
    }
    if(serialize_sample) {
        if (!RTICdrStream_serializeUnsignedLong(
            stream, &sample->appId)) {
            return RTI_FALSE;
        }
        if (!ActionPlugin_serialize(
            endpoint_data,
            &sample->action,
            stream,
            RTI_FALSE, encapsulation_id,
            RTI_TRUE,
            endpoint_plugin_qos)) {
            return RTI_FALSE;
        }
    }
    if(serialize_encapsulation) {
        RTICdrStream_restoreAlignment(stream,position);
    }
    return retval;
}
/* Deserialize a ControlData sample: appId then action, in declaration
 * order.  The sample is re-initialized (without allocating pointers)
 * before members are read. */
RTIBool
ControlDataPlugin_deserialize_sample(
    PRESTypePluginEndpointData endpoint_data,
    ControlData *sample,
    struct RTICdrStream *stream,
    RTIBool deserialize_encapsulation,
    RTIBool deserialize_sample,
    void *endpoint_plugin_qos)
{
    char * position = NULL;
    RTIBool done = RTI_FALSE;
    if (endpoint_data) {} /* To avoid warnings */
    if (endpoint_plugin_qos) {} /* To avoid warnings */
    if(deserialize_encapsulation) {
        /* Deserialize encapsulation */
        if (!RTICdrStream_deserializeAndSetCdrEncapsulation(stream)) {
            return RTI_FALSE;
        }
        position = RTICdrStream_resetAlignment(stream);
    }
    if(deserialize_sample) {
        ControlData_initialize_ex(sample, RTI_FALSE, RTI_FALSE);
        if (!RTICdrStream_deserializeUnsignedLong(
            stream, &sample->appId)) {
            goto fin;
        }
        if (!ActionPlugin_deserialize_sample(
            endpoint_data,
            &sample->action,
            stream,
            RTI_FALSE, RTI_TRUE,
            endpoint_plugin_qos)) {
            goto fin;
        }
    }
    done = RTI_TRUE;
    fin:
    /* A failed member read only counts as an error while bytes remain in
     * the stream; a cleanly exhausted stream is reported as success —
     * presumably to tolerate samples from differing type versions
     * (TODO confirm against RTI type-extension semantics). */
    if (done != RTI_TRUE && RTICdrStream_getRemainder(stream) > 0) {
        return RTI_FALSE;
    }
    if(deserialize_encapsulation) {
        RTICdrStream_restoreAlignment(stream,position);
    }
    return RTI_TRUE;
}
/* vtable hook: dereference the sample pointer and delegate; drop_sample
 * is unused for this type. */
RTIBool
ControlDataPlugin_deserialize(
    PRESTypePluginEndpointData endpoint_data,
    ControlData **sample,
    RTIBool * drop_sample,
    struct RTICdrStream *stream,
    RTIBool deserialize_encapsulation,
    RTIBool deserialize_sample,
    void *endpoint_plugin_qos)
{
    if (drop_sample) {} /* To avoid warnings */
    return ControlDataPlugin_deserialize_sample(
        endpoint_data, (sample != NULL)?*sample:NULL,
        stream, deserialize_encapsulation, deserialize_sample,
        endpoint_plugin_qos);
}
/* Advance the stream past one serialized ControlData (appId, then the
 * nested Action) without materializing the sample. */
RTIBool ControlDataPlugin_skip(
    PRESTypePluginEndpointData endpoint_data,
    struct RTICdrStream *stream,
    RTIBool skip_encapsulation,
    RTIBool skip_sample,
    void *endpoint_plugin_qos)
{
    char * position = NULL;
    RTIBool done = RTI_FALSE;
    if (endpoint_data) {} /* To avoid warnings */
    if (endpoint_plugin_qos) {} /* To avoid warnings */
    if(skip_encapsulation) {
        if (!RTICdrStream_skipEncapsulation(stream)) {
            return RTI_FALSE;
        }
        position = RTICdrStream_resetAlignment(stream);
    }
    if (skip_sample) {
        if (!RTICdrStream_skipUnsignedLong(stream)) {
            goto fin;
        }
        if (!ActionPlugin_skip(
            endpoint_data,
            stream,
            RTI_FALSE, RTI_TRUE,
            endpoint_plugin_qos)) {
            goto fin;
        }
    }
    done = RTI_TRUE;
    fin:
    /* As in deserialize: failure is only an error if bytes remain. */
    if (done != RTI_TRUE && RTICdrStream_getRemainder(stream) > 0) {
        return RTI_FALSE;
    }
    if(skip_encapsulation) {
        RTICdrStream_restoreAlignment(stream,position);
    }
    return RTI_TRUE;
}
/* Worst-case serialized size of one ControlData: appId (unsigned long)
 * plus the nested Action maximum; optionally includes the encapsulation
 * header.  Returns 1 when the encapsulation id is invalid. */
unsigned int
ControlDataPlugin_get_serialized_sample_max_size(
    PRESTypePluginEndpointData endpoint_data,
    RTIBool include_encapsulation,
    RTIEncapsulationId encapsulation_id,
    unsigned int current_alignment)
{
    unsigned int initial_alignment = current_alignment;
    unsigned int encapsulation_size = current_alignment;
    if (endpoint_data) {} /* To avoid warnings */
    if (include_encapsulation) {
        if (!RTICdrEncapsulation_validEncapsulationId(encapsulation_id)) {
            return 1;
        }
        RTICdrStream_getEncapsulationSize(encapsulation_size);
        encapsulation_size -= current_alignment;
        current_alignment = 0;
        initial_alignment = 0;
    }
    current_alignment += RTICdrType_getUnsignedLongMaxSizeSerialized(
        current_alignment);
    current_alignment += ActionPlugin_get_serialized_sample_max_size(
        endpoint_data,RTI_FALSE,encapsulation_id,current_alignment);
    if (include_encapsulation) {
        current_alignment += encapsulation_size;
    }
    return current_alignment - initial_alignment;
}
/* Minimum serialized size: same member walk using each member's minimum
 * size (all members here are fixed-size). */
unsigned int
ControlDataPlugin_get_serialized_sample_min_size(
    PRESTypePluginEndpointData endpoint_data,
    RTIBool include_encapsulation,
    RTIEncapsulationId encapsulation_id,
    unsigned int current_alignment)
{
    unsigned int initial_alignment = current_alignment;
    unsigned int encapsulation_size = current_alignment;
    if (endpoint_data) {} /* To avoid warnings */
    if (include_encapsulation) {
        if (!RTICdrEncapsulation_validEncapsulationId(encapsulation_id)) {
            return 1;
        }
        RTICdrStream_getEncapsulationSize(encapsulation_size);
        encapsulation_size -= current_alignment;
        current_alignment = 0;
        initial_alignment = 0;
    }
    current_alignment += RTICdrType_getUnsignedLongMaxSizeSerialized(
        current_alignment);
    current_alignment += ActionPlugin_get_serialized_sample_min_size(
        endpoint_data,RTI_FALSE,encapsulation_id,current_alignment);
    if (include_encapsulation) {
        current_alignment += encapsulation_size;
    }
    return current_alignment - initial_alignment;
}
/* Returns the size of the sample in its serialized form (in bytes).
 * It can also be an estimation in excess of the real buffer needed
 * during a call to the serialize() function.
 * The value reported does not have to include the space for the
 * encapsulation flags.
 */
unsigned int
ControlDataPlugin_get_serialized_sample_size(
    PRESTypePluginEndpointData endpoint_data,
    RTIBool include_encapsulation,
    RTIEncapsulationId encapsulation_id,
    unsigned int current_alignment,
    const ControlData * sample)
{
    unsigned int initial_alignment = current_alignment;
    unsigned int encapsulation_size = current_alignment;
    if (endpoint_data) {} /* To avoid warnings */
    if (sample) {} /* To avoid warnings */
    if (include_encapsulation) {
        if (!RTICdrEncapsulation_validEncapsulationId(encapsulation_id)) {
            return 1;
        }
        RTICdrStream_getEncapsulationSize(encapsulation_size);
        encapsulation_size -= current_alignment;
        current_alignment = 0;
        initial_alignment = 0;
    }
    current_alignment += RTICdrType_getUnsignedLongMaxSizeSerialized(
        current_alignment);
    current_alignment += ActionPlugin_get_serialized_sample_size(
        endpoint_data,RTI_FALSE, encapsulation_id,
        current_alignment, &sample->action);
    if (include_encapsulation) {
        current_alignment += encapsulation_size;
    }
    return current_alignment - initial_alignment;
}
/* --------------------------------------------------------------------------------------
Key Management functions:
* -------------------------------------------------------------------------------------- */
/* ControlData is declared without a key: report NO_KEY to the middleware. */
PRESTypePluginKeyKind
ControlDataPlugin_get_key_kind(void)
{
    return PRES_TYPEPLUGIN_NO_KEY;
}
/* Key serialization for this unkeyed type: writes the whole sample by
 * delegating to the full serializer. */
RTIBool
ControlDataPlugin_serialize_key(
    PRESTypePluginEndpointData endpoint_data,
    const ControlData *sample,
    struct RTICdrStream *stream,
    RTIBool serialize_encapsulation,
    RTIEncapsulationId encapsulation_id,
    RTIBool serialize_key,
    void *endpoint_plugin_qos)
{
    char * position = NULL;
    if (endpoint_data) {} /* To avoid warnings */
    if (endpoint_plugin_qos) {} /* To avoid warnings */
    if(serialize_encapsulation) {
        if (!RTICdrStream_serializeAndSetCdrEncapsulation(stream, encapsulation_id)) {
            return RTI_FALSE;
        }
        position = RTICdrStream_resetAlignment(stream);
    }
    if(serialize_key) {
        if (!ControlDataPlugin_serialize(
            endpoint_data,
            sample,
            stream,
            RTI_FALSE, encapsulation_id,
            RTI_TRUE,
            endpoint_plugin_qos)) {
            return RTI_FALSE;
        }
    }
    if(serialize_encapsulation) {
        RTICdrStream_restoreAlignment(stream,position);
    }
    return RTI_TRUE;
}
/* Key deserialization: reads the whole sample via the full
 * deserializer. */
RTIBool ControlDataPlugin_deserialize_key_sample(
    PRESTypePluginEndpointData endpoint_data,
    ControlData *sample,
    struct RTICdrStream *stream,
    RTIBool deserialize_encapsulation,
    RTIBool deserialize_key,
    void *endpoint_plugin_qos)
{
    char * position = NULL;
    if (endpoint_data) {} /* To avoid warnings */
    if (endpoint_plugin_qos) {} /* To avoid warnings */
    if(deserialize_encapsulation) {
        /* Deserialize encapsulation */
        if (!RTICdrStream_deserializeAndSetCdrEncapsulation(stream)) {
            return RTI_FALSE;
        }
        position = RTICdrStream_resetAlignment(stream);
    }
    if (deserialize_key) {
        if (!ControlDataPlugin_deserialize_sample(
            endpoint_data, sample, stream,
            RTI_FALSE, RTI_TRUE,
            endpoint_plugin_qos)) {
            return RTI_FALSE;
        }
    }
    if(deserialize_encapsulation) {
        RTICdrStream_restoreAlignment(stream,position);
    }
    return RTI_TRUE;
}
/* vtable hook: dereference the sample pointer and delegate; drop_sample
 * is unused for this type. */
RTIBool ControlDataPlugin_deserialize_key(
    PRESTypePluginEndpointData endpoint_data,
    ControlData **sample,
    RTIBool * drop_sample,
    struct RTICdrStream *stream,
    RTIBool deserialize_encapsulation,
    RTIBool deserialize_key,
    void *endpoint_plugin_qos)
{
    if (drop_sample) {} /* To avoid warnings */
    return ControlDataPlugin_deserialize_key_sample(
        endpoint_data, (sample != NULL)?*sample:NULL, stream,
        deserialize_encapsulation, deserialize_key, endpoint_plugin_qos);
}
/* Worst-case serialized key size; for this unkeyed type it is simply the
 * whole-sample maximum. */
unsigned int
ControlDataPlugin_get_serialized_key_max_size(
    PRESTypePluginEndpointData endpoint_data,
    RTIBool include_encapsulation,
    RTIEncapsulationId encapsulation_id,
    unsigned int current_alignment)
{
    unsigned int encapsulation_size = current_alignment;
    unsigned int initial_alignment = current_alignment;
    if (endpoint_data) {} /* To avoid warnings */
    if (include_encapsulation) {
        if (!RTICdrEncapsulation_validEncapsulationId(encapsulation_id)) {
            return 1;
        }
        RTICdrStream_getEncapsulationSize(encapsulation_size);
        encapsulation_size -= current_alignment;
        current_alignment = 0;
        initial_alignment = 0;
    }
    current_alignment += ControlDataPlugin_get_serialized_sample_max_size(
        endpoint_data,RTI_FALSE, encapsulation_id, current_alignment);
    if (include_encapsulation) {
        current_alignment += encapsulation_size;
    }
    return current_alignment - initial_alignment;
}
/* Rebuild the key fields from a serialized sample; for this unkeyed type
 * that is a full deserialize. */
RTIBool
ControlDataPlugin_serialized_sample_to_key(
    PRESTypePluginEndpointData endpoint_data,
    ControlData *sample,
    struct RTICdrStream *stream,
    RTIBool deserialize_encapsulation,
    RTIBool deserialize_key,
    void *endpoint_plugin_qos)
{
    char * position = NULL;
    RTIBool done = RTI_FALSE;
    /* NOTE(review): when stream == NULL we jump to fin, where
     * RTICdrStream_getRemainder(stream) would dereference the NULL
     * pointer.  This file is auto-generated, so the fix belongs in the
     * code generator / IDL, not a hand edit here. */
    if (stream == NULL) goto fin; /* To avoid warnings */
    if(deserialize_encapsulation) {
        if (!RTICdrStream_deserializeAndSetCdrEncapsulation(stream)) {
            return RTI_FALSE;
        }
        position = RTICdrStream_resetAlignment(stream);
    }
    if (deserialize_key) {
        if (!ControlDataPlugin_deserialize_sample(
            endpoint_data, sample, stream, RTI_FALSE,
            RTI_TRUE, endpoint_plugin_qos)) {
            return RTI_FALSE;
        }
    }
    done = RTI_TRUE;
    fin:
    if (done != RTI_TRUE && RTICdrStream_getRemainder(stream) > 0) {
        return RTI_FALSE;
    }
    if(deserialize_encapsulation) {
        RTICdrStream_restoreAlignment(stream,position);
    }
    return RTI_TRUE;
}
/* ------------------------------------------------------------------------
* Plug-in Installation Methods
* ------------------------------------------------------------------------ */
/* Allocate and populate the PRESTypePlugin vtable for ControlData.
 * Several hooks (create/destroy/get/return sample, get/return buffer)
 * reference generated functions not defined in this file — presumably
 * declared in ControlDataPlugin.h (TODO confirm).  Returns NULL when
 * allocation fails; free with ControlDataPlugin_delete. */
struct PRESTypePlugin *ControlDataPlugin_new(void)
{
    struct PRESTypePlugin *plugin = NULL;
    const struct PRESTypePluginVersion PLUGIN_VERSION =
    PRES_TYPE_PLUGIN_VERSION_2_0;
    RTIOsapiHeap_allocateStructure(
        &plugin, struct PRESTypePlugin);
    if (plugin == NULL) {
        return NULL;
    }
    plugin->version = PLUGIN_VERSION;
    /* set up parent's function pointers */
    plugin->onParticipantAttached =
    (PRESTypePluginOnParticipantAttachedCallback)
    ControlDataPlugin_on_participant_attached;
    plugin->onParticipantDetached =
    (PRESTypePluginOnParticipantDetachedCallback)
    ControlDataPlugin_on_participant_detached;
    plugin->onEndpointAttached =
    (PRESTypePluginOnEndpointAttachedCallback)
    ControlDataPlugin_on_endpoint_attached;
    plugin->onEndpointDetached =
    (PRESTypePluginOnEndpointDetachedCallback)
    ControlDataPlugin_on_endpoint_detached;
    plugin->copySampleFnc =
    (PRESTypePluginCopySampleFunction)
    ControlDataPlugin_copy_sample;
    plugin->createSampleFnc =
    (PRESTypePluginCreateSampleFunction)
    ControlDataPlugin_create_sample;
    plugin->destroySampleFnc =
    (PRESTypePluginDestroySampleFunction)
    ControlDataPlugin_destroy_sample;
    plugin->serializeFnc =
    (PRESTypePluginSerializeFunction)
    ControlDataPlugin_serialize;
    plugin->deserializeFnc =
    (PRESTypePluginDeserializeFunction)
    ControlDataPlugin_deserialize;
    plugin->getSerializedSampleMaxSizeFnc =
    (PRESTypePluginGetSerializedSampleMaxSizeFunction)
    ControlDataPlugin_get_serialized_sample_max_size;
    plugin->getSerializedSampleMinSizeFnc =
    (PRESTypePluginGetSerializedSampleMinSizeFunction)
    ControlDataPlugin_get_serialized_sample_min_size;
    plugin->getSampleFnc =
    (PRESTypePluginGetSampleFunction)
    ControlDataPlugin_get_sample;
    plugin->returnSampleFnc =
    (PRESTypePluginReturnSampleFunction)
    ControlDataPlugin_return_sample;
    plugin->getKeyKindFnc =
    (PRESTypePluginGetKeyKindFunction)
    ControlDataPlugin_get_key_kind;
    /* These functions are only used for keyed types. As this is not a keyed
    type they are all set to NULL
    */
    plugin->serializeKeyFnc = NULL;
    plugin->deserializeKeyFnc = NULL;
    plugin->getKeyFnc = NULL;
    plugin->returnKeyFnc = NULL;
    plugin->instanceToKeyFnc = NULL;
    plugin->keyToInstanceFnc = NULL;
    plugin->getSerializedKeyMaxSizeFnc = NULL;
    plugin->instanceToKeyHashFnc = NULL;
    plugin->serializedSampleToKeyHashFnc = NULL;
    plugin->serializedKeyToKeyHashFnc = NULL;
    plugin->typeCode = (struct RTICdrTypeCode *)ControlData_get_typecode();
    plugin->languageKind = PRES_TYPEPLUGIN_DDS_TYPE;
    /* Serialized buffer */
    plugin->getBuffer =
    (PRESTypePluginGetBufferFunction)
    ControlDataPlugin_get_buffer;
    plugin->returnBuffer =
    (PRESTypePluginReturnBufferFunction)
    ControlDataPlugin_return_buffer;
    plugin->getSerializedSampleSizeFnc =
    (PRESTypePluginGetSerializedSampleSizeFunction)
    ControlDataPlugin_get_serialized_sample_size;
    plugin->endpointTypeName = ControlDataTYPENAME;
    return plugin;
}
/* Release a plugin vtable created by ControlDataPlugin_new. */
void
ControlDataPlugin_delete(struct PRESTypePlugin *plugin)
{
    RTIOsapiHeap_freeStructure(plugin);
}
|
/*
==============================================================================
This file is part of the iPlug 2 library. Copyright (C) the iPlug 2 developers.
See LICENSE.txt for more info.
==============================================================================
*/
#pragma once
/**
* @file
* @copydoc SynthVoice
*/
#include <array>
#include <vector>
#include <stdint.h>
#include "ptrlist.h"
#include "IPlugConstants.h"
#include "IPlugMidi.h"
#include "IPlugLogger.h"
#include "IPlugQueue.h"
#include "ControlRamp.h"
/** A generic synthesizer voice to be controlled by a voice allocator. */
namespace voiceControlNames
{
/** This enum names the control ramps by which we connect a controller to a synth voice.
*/
enum eControlNames
{
kVoiceControlGate = 0,
kVoiceControlPitch,
kVoiceControlPitchBend,
kVoiceControlPressure,
kVoiceControlTimbre,
kNumVoiceControlRamps
};
}
using namespace voiceControlNames;
typedef std::array< ControlRamp, kNumVoiceControlRamps > VoiceInputs;
#pragma mark - Voice class
class SynthVoice
{
public:
virtual ~SynthVoice() {};
/** @return true if voice is generating any audio. */
virtual bool GetBusy() const = 0;
/** Trigger is called by the VoiceAllocator when a new voice should start, or if the voice limit has been hit and an existing voice needs to re-trigger. While the VoiceInputs are sufficient to control a voice from the VoiceAllocator, this method can be used to do additional tasks like resetting oscillators.
* @param level Normalised starting level for this voice, derived from the velocity of the keypress, or in the case of a re-trigger the existing level \todo check
* @param isRetrigger If this is \c true it means the voice is being re-triggered, and you should accommodate for this in your algorithm */
virtual void Trigger(double level, bool isRetrigger) {};
/** As with Trigger, called to do optional tasks when a voice is released. */
virtual void Release() {};
/** Process a block of audio data for the voice
@param inputs Pointer to input channel arrays. Sometimes synthesisers have audio inputs. Alternatively you can pass in modulation from global LFOs etc here.
@param outputs Pointer to output channel arrays. You should add to the existing data in these arrays (so that all the voices get summed)
@param nInputs The number of input channels that contain valid data
@param nOutputs input channels that contain valid data
@param startIdx The start index of the block of samples to process
@param nFrames The number of samples the process in this block
*/
virtual void ProcessSamplesAccumulating(sample** inputs, sample** outputs, int nInputs, int nOutputs, int startIdx, int nFrames)
{
for (auto c = 0; c < nOutputs; c++)
{
for (auto s = startIdx; s < startIdx + nFrames; s++)
{
outputs[c][s] += 0.; // if you are following this no-op example, remember you need to accumulate the output of all the different voices
}
}
}
/** If you have members that need to update when the sample rate changes you can do that by overriding this method
* @param sampleRate The new sample rate */
virtual void SetSampleRate(double sampleRate) {};
/** Implement this to allow picking a sound program from an integer index, as with MIDI
* @param p The new program number */
virtual void SetProgramNumber(int pgm) {};
/** Implement this to respond to control numbers for which there are not ramps. A synthesizer could
* use its own ramps internally if needed.
*/
virtual void SetControl(int controlNumber, float value) {};
protected:
VoiceInputs mInputs;
int64_t mLastTriggeredTime{-1};
uint8_t mVoiceNumber{0};
uint8_t mZone{0};
uint8_t mChannel{0};
uint8_t mKey{0};
double mBasePitch{0.};
double mAftertouch{0.};
double mGain{0.}; // used by voice allocator to hard-kill voices.
friend class MidiSynth;
friend class VoiceAllocator;
};
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.