<reponame>martinda/hdlConvertor<filename>.travis.yml
sudo: required
dist: xenial
language: python
git:
depth: 1
cache:
- apt: true
- pip: true
matrix:
include:
- name: "Python 2.7 gcc-8"
python: 2.7
env: CC=gcc-8 CXX=g++-8
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-8
- gcc-8
- name: "Python 3.5 gcc-8"
python: 3.5
env: CC=gcc-8 CXX=g++-8
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-8
- gcc-8
- name: "Python 3.7 gcc-8"
python: 3.7
env: CC=gcc-8 CXX=g++-8 BUILD_ARGS="--build-type Debug"
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-8
- gcc-8
- name: "Python 3.7 gcc-7"
python: 3.7
env: CC=gcc-7 CXX=g++-7
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-7
- gcc-7
addons:
apt:
sources:
- ubuntu-toolchain-r-test
#- llvm-toolchain-precise-3.9
packages:
- build-essential
- cython
# command to install dependencies
install:
- sudo cp .travis/disco.list /etc/apt/sources.list.d/disco.list
- sudo apt update
- sudo apt install libantlr4-runtime-dev antlr4
- pip install -r requirements.txt
- python setup.py build $BUILD_ARGS
- python setup.py install
deploy:
provider: pypi
user: nic30
password:
secure: jMuoFXUX5kopaE0ezjl6NUtxAmr8SyKpRyqZQtv3TISOCCmyenqeMR3EVckEsslC5NpQOVDQHagevooo2Ysfpu6WaFll+QJgkvB7yvt3ZHNcXj9YITTpjJNmPF58jch5cjb+kFvMvPnvOnbasBtB+KRiV4tmGRXrgP0Qg7mamVnafo3Krq0OY5hAE+fK2WhFfkyZ8xdo1M794mHf9fIRcmT9c9D8cMatYjjbq724g3OJ8wlIWxDfoa012D+G2IdZOd5+aMkNMOPd504ZnUj40T39kVsgXv0kIx42Y/Wz4Om53UT2GwTBllERq4u5nDB6A4NiVTwpx4g9+r6rCjIBtX11bRgMaDCp7Jdn6KWOsKhvQtU4u2ZGUdIRCrIoJj0NInCfedKnF0FqYA4BC7HgbFCIUnpyKxR/uU0Vz7tD/jW4vP5tr4PcgQ2eTRM5eVJZ3dYoxzlFh2MzmjXA4+MamFVCRjmzDdvaBafEvYlyUUq0D2MQ2hMwjyNlcEIKW4eOi2LaVgdwJsKbDQHc66oVDQRRYu77yFW9XGhwFoTEn0km5TTVah0d9JwvqVzWo4R33FexuthR7pHQbIe7Shw6HKpV6JfbLtvYSLLBXK8UGlW7PXYQtAZN81fwZzXd+2RwCr8otJeGzNIHug0HjK4S0CYCx4HNi0qHXOl5B+DiPuw=
on:
tags: true
branch: master
script:
- python setup.py test # scikit-build automatically runs setup.py develop, which requires access to the installation dir
|
<gh_stars>100-1000
# Copyright 2020 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
package:
name: tcdm_interface
authors:
- <NAME> <<EMAIL>>
dependencies:
reqrsp_interface: {path: ../reqrsp_interface}
export_include_dirs:
- include
sources:
# Level 0
- src/tcdm_interface.sv
# Level 1
- src/axi_to_tcdm.sv
- src/reqrsp_to_tcdm.sv
- src/tcdm_mux.sv
- target: simulation
files:
- src/tcdm_test.sv
- target: test
files:
# Level 0
- test/reqrsp_to_tcdm_tb.sv
- test/tcdm_mux_tb.sv
|
<filename>env/azure_pipeline.yaml
# Triggers are currently handled by branch policy for build validation in azure devops
trigger: none
# trigger:
# batch: true
# branches:
# include:
# - master
stages:
- stage: 'all'
jobs:
- job: 'CI_no_license'
pool:
name: CI_no_license
steps:
# Print the pwd to make it easier to find the area where this was run
- script: |
pwd
displayName: 'Display PWD'
- script: |
source activate digital
yapf --diff --recursive . --style env/.style.yapf
# Do an automatic regen with
# yapf --in-place --recursive . --style env/.style.yapf
displayName: 'yapf Check'
- script: |
source activate digital
bazel run --nokeep_state_after_build //tests:buildifier_format_diff
bazel run --nokeep_state_after_build //tests:buildifier_lint
displayName: 'BUILD Lint Check'
- script: |
bazel clean --expunge
./tests/doc_test.sh
displayName: 'Doc Generation Check'
- script: |
source activate digital
bazel test --cache_test_results=no --jobs 8 --test_output=all $(bazel query "tests(//...) except attr(tags, 'no_ci_gate', //...) except attr(tags, 'requires_license', //...)")
displayName: 'Bazel tests (no licenses)'
- task: PublishTestResults@2
condition: succeededOrFailed()
inputs:
testResultsFiles: 'bazel-testlogs/**/*.xml'
testRunTitle: 'Publish test results.'
displayName: 'Bazel Tests'
- job: 'CI'
pool:
name: CI
steps:
# Print the pwd to make it easier to find the area where this was run
- script: |
pwd
displayName: 'Display PWD'
- bash: |
source activate digital
find -L `bazel info bazel-testlogs` -name "test.xml" | xargs rm
bazel test --cache_test_results=no --jobs 8 --test_output=all $(bazel query "attr(tags, 'requires_license', tests(//...)) except attr(tags, 'no_ci_gate', //...)")
displayName: 'Bazel Tests'
- task: PublishTestResults@2
condition: succeededOrFailed()
inputs:
testResultsFiles: 'bazel-testlogs/**/*.xml'
testRunTitle: 'Publish test results.'
displayName: 'Bazel Tests Publish Results'
|
<filename>.github/workflows/test.yml<gh_stars>10-100
name: Test
on:
push:
pull_request:
jobs:
action:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Run VUnit tests
uses: ./
with:
cmd: test/run.py
container-job:
runs-on: ubuntu-latest
container: ghdl/vunit:mcode-master
steps:
- uses: actions/checkout@v2
- run: test/run.py
container-step:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: docker://ghdl/vunit:mcode-master
with:
args: test/run.py
|
version: 1
formatters:
colored:
format: '[%(asctime)s] (%(color)s%(slvl)s%(crst)s) %(name)-6s %(msgcolor)s%(message)s%(crst)s'
nocolor:
format: '[%(asctime)s] (%(slvl)s) %(name)-6s %(message)s'
handlers:
console:
class: logging.StreamHandler
level: INFO
formatter: colored
stream: ext://sys.stderr
loggers:
vunit:
level: INFO
handlers: [console]
propagate: yes
root:
level: DEBUG
handlers: [console]
|
<gh_stars>0
identifier: fwrisc_test
name: fwrisc SoC
type: mcu
arch: riscv32
toolchain:
- zephyr
ram: 32
testing:
ignore_tags:
- net
- bluetooth
|
<reponame>usmnzain/serv
on: [push, pull_request]
jobs:
lint:
runs-on: ubuntu-latest
name: Linter
steps:
- name: Checkout
uses: actions/checkout@v1
- name: Lint Verilog source files with Verilator
uses: librecores/ci-fusesoc-action@main
with:
command: 'run'
core: 'serv'
target: 'lint'
tool: 'verilator'
|
name: ci
on: [push]
jobs:
################
# Verible Lint #
################
verilog:
name: Verilog Sources
# This job runs on Linux (fixed ubuntu version)
runs-on: ubuntu-18.04
env:
VERIBLE_VERSION: 0.0-807-g10e7c71
steps:
- uses: actions/checkout@v2
- name: Install Verible
run: |
set -e
mkdir -p build/verible
cd build/verible
curl -Ls -o verible.tar.gz https://github.com/google/verible/releases/download/v$VERIBLE_VERSION/verible-v$VERIBLE_VERSION-Ubuntu-18.04-bionic-x86_64.tar.gz
sudo mkdir -p /tools/verible && sudo chmod 777 /tools/verible
tar -C /tools/verible -xf verible.tar.gz --strip-components=1
echo "PATH=$PATH:/tools/verible/bin" >> $GITHUB_ENV
# Run linter in hw/ip subdir
- name: Run Lint
run: |
echo "::add-matcher::.github/verible-lint-matcher.json"
find src \
-not \( -path src/deprecated -prune \) -name "*.sv" | \
xargs verible-verilog-lint --waiver_files lint/common_cells.style.waiver --rules=-interface-name-style --lint_fatal
echo "::remove-matcher owner=verible-lint-matcher::"
|
#
# List of RTL sources. Unlike IPs, these reside in
# the current Git repository.
# Uses the YAML syntax.
#
rtl:
path: ..
tech_cells_generic:
path: ../../tech_cells_generic
|
<filename>external/opensocdebug/software/shippable.yml<gh_stars>10-100
language: c
compiler:
- gcc
env:
global:
# Codecov upload token: CODECOV_TOKEN=xxx
# To decrypt/replace go to the Shippable UI into the project settings
- secure: <KEY>
matrix:
# Configure options (resulting in one build job each)
- BUILD_TYPE=valgrind CONFIGURE_OPTS="--with-glip --enable-debug --enable-code-coverage --enable-valgrind --disable-asan" UPLOAD_COVERAGE=false
- BUILD_TYPE=asan CONFIGURE_OPTS="--with-glip --enable-debug --enable-code-coverage --disable-valgrind --enable-asan" UPLOAD_COVERAGE=true
- BUILD_TYPE=release CONFIGURE_OPTS="--with-glip" UPLOAD_COVERAGE=false
build:
ci:
# Build and install GLIP as build dependency
- git clone --depth 1 https://github.com/TUM-LIS/glip.git
- cd glip && ./autogen.sh && ./configure && make && sudo make install && cd ..
# Now build the actual OSD software
- sudo apt-get update
- ./install-build-deps.sh
- ./autogen.sh
- echo "Running configure with $CONFIGURE_OPTS"
- ./configure $CONFIGURE_OPTS
- make
- make check-code-coverage
- make check-valgrind
- sudo make install
# Build and test Python bindings (using the release version of libosd)
- if [ "$BUILD_TYPE" == "release" ]; then cd src/python; tox; cd ../..; fi
post_ci:
# Copy code coverage information where shippable.io finds it
- if [ "$UPLOAD_COVERAGE" == "true" ]; then make -C tests/unit coverage-cobertura-xml; cp tests/unit/coverage-cobertura.xml shippable/codecoverage; fi
on_success:
# Copy test results where shippable.io finds them
- make -C tests/unit check-junit-xml
- cp tests/unit/*.junit.xml shippable/testresults
# Report coverage results to codecov
- if [ "$UPLOAD_COVERAGE" == "true" ]; then bash <(curl -s https://codecov.io/bash); else echo "Not uploading coverage data for this build."; fi
on_failure:
# Dump test logs to stdout for debugging test failures
- if [ -f tests/unit/test-suite.log ]; then cat tests/unit/test-suite.log; fi
- if [ -f tests/unit/test-suite-memcheck.log ]; then cat tests/unit/test-suite-memcheck.log; fi
# Copy test results where shippable.io finds them
- make -C tests/unit check-junit-xml
- cp tests/unit/*.junit.xml shippable/testresults
|
# action.yml
name: 'Hawkeye CLI Scanner'
description: Runs the Hawkeye CLI scanner
runs:
using: 'docker'
image: 'Dockerfile'
|
<reponame>mfkiwl/garnet<gh_stars>0
name: gen_sram_macro
commands:
- bash gen_srams.sh
inputs:
- adk
outputs:
- sram.v
- sram-pwr.v
- sram.lef
- sram_tt.lib
- sram_ff.lib
- sram_tt.db # Need this back for power analysis on ptpx
- sram.gds
- sram.spi
parameters:
num_words: 2048
word_size: 64
mux_size: 8
corner: "tt0p8v25c"
bc_corner: "ffg0p88v125c"
partial_write: False
|
package:
name: apb_fll_if
dependencies:
apb: { git: "https://github.com/pulp-platform/apb.git", version: 0.1.0 }
sources:
- src/fll_intf.sv
- src/apb_fll_if.sv
- src/apb_to_fll.sv
- target: test
files:
- test/apb_fll_tb.sv
|
<reponame>DanielTRYTRYLOOK/RDF-2020
name: usbf_top
clock_port: clk_i
verilog:
- usbf_crc16.v
- usbf_crc5.v
- usbf_defines.v
- usbf_ep_rf.v
- usbf_ep_rf_dummy.v
- usbf_idma.v
- usbf_mem_arb.v
- usbf_pa.v
- usbf_pd.v
- usbf_pe.v
- usbf_pl.v
- usbf_rf.v
- usbf_top.v
- usbf_utmi_if.v
- usbf_utmi_ls.v
- usbf_wb.v
|
<filename>.landscape.yaml
max-line-length: 300
ignore-paths:
- pybar/utils
- pybar/scans/deprecated
ignore-patterns:
- analyze_test_beam
|
hw-mac-engine:
incdirs : [
rtl
]
files : [
rtl/mac_package.sv,
rtl/mac_fsm.sv,
rtl/mac_ctrl.sv,
rtl/mac_streamer.sv,
rtl/mac_engine.sv,
rtl/mac_top.sv,
rtl/aes_1cc.v,
rtl/AddRoundKey.v,
rtl/KeyExpansion.v,
rtl/MixColumns.v,
rtl/ShiftRows.v,
rtl/SubBytes.v,
rtl/SubBytes_sbox.v,
rtl/tempsen.v,
rtl/md5.v,
rtl/keccak.v,
rtl/f_permutation.v,
rtl/mux_func.sv,
rtl/padder.v,
rtl/padder1.v,
rtl/const2in1.v,
rtl/round2in1.v,
wrap/mac_top_wrap.sv
]
vlog_opts : [
"-L hwpe_ctrl_lib",
"-L hwpe_stream_lib"
]
|
<reponame>mfkiwl/garnet
name: e2e_testbench
commands:
- bash gen_testbench.sh
# The script extracts noteworthy tiles
outputs:
- testbench.sv
- input.raw
- tiles_Tile_MemCore.list
- tiles_Tile_PE.list
- cmd.tcl
parameters:
array_width: 12
array_height: 12
pipeline_config_interval: 8
interconnect_only: False
soc_only: False
PWR_AWARE: False
use_container: True
use_local_garnet: True
app_to_run: "tests/conv_3_3"
waves: False
clock_period: 1.1
postconditions:
- assert File( 'outputs/testbench.sv' ) # must exist
|
name: Manylinux
on:
push:
tags:
- 'v*.*.*'
jobs:
deploy:
strategy:
matrix:
os: [ubuntu-latest]
python-version: [3.7, 3.8]
rust-version: [stable]
runs-on: ${{ matrix.os }}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
- name: Set up Rust ${{ matrix.rust-version }}
uses: hecrj/setup-rust-action@v1
with:
rust-version: ${{ matrix.rust-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install twine
- name: Build source distribution
run: |
python setup.py sdist
- name: Build Python 3.7 distribution
if: matrix.python-version == '3.7'
run: |
docker pull quay.io/pypa/manylinux1_x86_64
docker run --rm -e PLAT=manylinux1_x86_64 -e PYVER=cp37-cp37m -v `pwd`:/io quay.io/pypa/manylinux1_x86_64 /io/build-manylinux-wheels.sh
- name: Build Python 3.8 distribution
if: matrix.python-version == '3.8'
run: |
docker pull quay.io/pypa/manylinux1_x86_64
docker run --rm -e PLAT=manylinux1_x86_64 -e PYVER=cp38-cp38 -v `pwd`:/io quay.io/pypa/manylinux1_x86_64 /io/build-manylinux-wheels.sh
- name: Publish source to PyPI
if: matrix.python-version == '3.7'
env:
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
run: |
twine upload dist/*
- name: Publish binaries to PyPI
env:
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
run: |
twine upload wheelhouse/svinst*-manylinux*.whl
|
<filename>tests/config/delta_mark_classification.yml<gh_stars>0
# supported task types are 'classification', 'object_detection' and 'semantic_segmentation'.
task_type: classification
network_name: LmnetV1Quantize
dataset:
format: DeLTA-Mark for Classification
train_path: ./lmnet/tests/fixtures/datasets/custom_delta_mark_classification/for_train
test_path:
trainer:
batch_size: 1
epochs: 1
common:
image_size:
- 128 # height
- 128 # width
# set pretrained model name. Currently this feature is not supported and is always ignored.
pretrain_model: false
# enable dataset prefetch; set to false if weird problems happen
dataset_prefetch: true
|
<filename>examples/afe/source.yaml
verilog_sources:
sim_ctrl:
files: "sim_ctrl.sv"
fileset: "sim"
ams_models:
files:
- "build/channel/channel.sv"
- "build/ctle1/ctle1.sv"
- "build/ctle2/ctle2.sv"
- "build/ctle3/ctle3.sv"
- "build/nonlin/nonlin.sv"
- "build/unfm/unfm.sv"
- "build/osc/osc.sv"
defines:
DT_MSDSL:
name: DT_MSDSL
value: 62.5e-12
|
name: sasc_top
clock_port: clk
verilog:
- sasc_brg.v
- sasc_fifo4.v
- sasc_top.v
- timescale.v
|
version: 1.0.{build}
image: Visual Studio 2017
shallow_clone: true
configuration: Release
platform: x64
install:
- cmd: echo "Downloading conan..."
- cmd: set PATH=%PATH%;%PYTHON%/Scripts/
- cmd: pip.exe install conan
- cmd: conan user # Create the conan data directory
- cmd: conan --version
- cmd: mkdir build
- cmd: cd build
- cmd: cmake -G"Visual Studio 15 2017 Win64" ..
build:
parallel: true
verbosity: minimal
project: build/slang.sln
test_script:
- cmd: bin\unittests.exe
|
# Human readable task name
name: Comlink
# Long form description.
description: |+
We have captured a spy. They were carrying this device with them. See the README for more details.
# The flag
flag: CTF{HAVE_YOU_EVER_SEEN_A_Z80_CPU_WITH_AN_AES_PERIPHERAL}
# Task category. (one of hardware, crypto, pwn, reversing, web, net, misc)
category: hardware
# === the fields below will be filled by SRE or automation ===
# Task label
label: ''
# URL for web challenges
link: ''
# host/port for non-web challenges
host: 'comlink.2021.ctfcompetition.com 1337'
# the URL for attachments, to be filled in by automation
attachment: ''
# is this challenge released? Will be set by SREs
visible: true
|
run_sim_udp:vivado2019.2:questasim2019.2:
extends: .template_base
image: ${IPBUS_DOCKER_REGISTRY}/ipbus-fw-dev-centos7:2021-06-21__ipbbdev-2021i_uhal2.8.0
tags:
- docker
- docker-cap-net-admin
- docker-device-net-tun
- xilinx-tools
stage: quick_checks
only:
- /^pull-requests.[0-9]+$/
- master
- tags
- web
variables:
VIVADO_VERSION: "2019.2"
IPBB_SIMLIB_BASE: /scratch/xilinx-simlibs
script:
- export PATH=/software/mentor/questa/2019.2/questasim/bin:$PATH
- export PATH=/opt/cactus/bin/uhal/tests:$PATH
- export LD_LIBRARY_PATH=/opt/cactus/lib:$LD_LIBRARY_PATH
- ipbb init work_area
- cd work_area
- ln -s ${CI_PROJECT_DIR} src/ipbus-firmware
- /${CI_PROJECT_DIR}/work_area/src/ipbus-firmware/tests/ci/test-run-sim-udp.sh sim
run_sim_eth:vivado2019.2:questasim2019.2:
extends: .template_base
image: ${IPBUS_DOCKER_REGISTRY}/ipbus-fw-dev-centos7:2021-06-21__ipbbdev-2021i_uhal2.8.0
tags:
- docker
- docker-cap-net-admin
- docker-device-net-tun
- xilinx-tools
stage: quick_checks
only:
- /^pull-requests.[0-9]+$/
- master
- tags
- web
variables:
VIVADO_VERSION: "2019.2"
IPBB_SIMLIB_BASE: /scratch/xilinx-simlibs
script:
- sudo openvpn --mktun --dev tap0
- sudo /sbin/ifconfig tap0 up 192.168.201.1
- sudo chmod a+rw /dev/net/tun
- export PATH=/software/mentor/questa/2019.2/questasim/bin:$PATH
- ipbb init work_area
- cd work_area
- ln -s ${CI_PROJECT_DIR} src/ipbus-firmware
- /${CI_PROJECT_DIR}/work_area/src/ipbus-firmware/tests/ci/test-run-sim-eth.sh
run_ram_slaves_testbench_sim:vivado2019.2:questasim2019.2:
extends: .template_base
image: ${IPBUS_DOCKER_REGISTRY}/ipbus-fw-dev-centos7:2021-06-21__ipbbdev-2021i_uhal2.8.0
tags:
- docker
- docker-cap-net-admin
- docker-device-net-tun
- xilinx-tools
stage: quick_checks
only:
- /^pull-requests.[0-9]+$/
- master
- tags
- web
variables:
VIVADO_VERSION: "2019.2"
IPBB_SIMLIB_BASE: /scratch/xilinx-simlibs
script:
- sudo openvpn --mktun --dev tap0
- sudo /sbin/ifconfig tap0 up 192.168.201.1
- sudo chmod a+rw /dev/net/tun
- export PATH=/software/mentor/questa/2019.2/questasim/bin:$PATH
- ipbb init work_area
- cd work_area
- ln -s ${CI_PROJECT_DIR} src/ipbus-firmware
- ipbb proj create sim ram_slvs_tb ipbus-firmware:tests/ram_slaves top_sim.dep
- cd proj/ram_slvs_tb
- ipbb sim setup-simlib
- ipbb sim ipcores
- ipbb sim fli-udp
- ipbb sim generate-project
- ./run_sim -c work.top -do 'run 10us' -do 'quit'
run_ctr_slaves_testbench_sim:vivado2019.2:questasim2019.2:
extends: .template_base
image: ${IPBUS_DOCKER_REGISTRY}/ipbus-fw-dev-centos7:2021-06-21__ipbbdev-2021i_uhal2.8.0
tags:
- docker
- xilinx-tools
stage: quick_checks
only:
- /^pull-requests.[0-9]+$/
- master
- tags
- web
variables:
VIVADO_VERSION: "2019.2"
IPBB_SIMLIB_BASE: /scratch/xilinx-simlibs
script:
- export PATH=/software/mentor/questa/2019.2/questasim/bin:$PATH
- export LD_LIBRARY_PATH=/opt/cactus/lib:$LD_LIBRARY_PATH
- ipbb init work_area
- cd work_area
- ln -s ${CI_PROJECT_DIR} src/ipbus-firmware
- /${CI_PROJECT_DIR}/work_area/src/ipbus-firmware/tests/ci/test-run-sim-slave-counters.sh
|
<filename>libs/EXTERNAL/libargparse/.travis.yml
language: cpp
dist: trusty #Ubuntu 14.04 by default
sudo: false #Use container based infrastructure
matrix:
include:
#Extensive testing for base compiler
- env: MATRIX_EVAL="CC=gcc-5 CXX=g++-5"
addons: { apt: { packages: ["cmake", "g++-5"], sources: ["ubuntu-toolchain-r-test"] } }
before_install:
- eval "${MATRIX_EVAL}" #Set compiler versions
- echo $CC
- echo $CXX
script:
#Build
- mkdir -p build
- pushd build
- cmake ..
- make -j2
#Test
- ./argparse_example -h
- ./argparse_test
|
<reponame>b1f6c1c4/fpnew
# Copyright 2019 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
package:
name: FPnew
authors: ["<NAME> <<EMAIL>>"]
dependencies:
common_cells: {git: "https://github.com/pulp-platform/common_cells.git", version: 1.21.0}
fpu_div_sqrt_mvp: {git: "https://github.com/pulp-platform/fpu_div_sqrt_mvp.git", version: 1.0.4}
sources:
- src/fpnew_pkg.sv
- src/fpnew_cast_multi.sv
- src/fpnew_classifier.sv
- src/fpnew_divsqrt_multi.sv
- src/fpnew_fma.sv
- src/fpnew_fma_multi.sv
- src/fpnew_noncomp.sv
- src/fpnew_opgroup_block.sv
- src/fpnew_opgroup_fmt_slice.sv
- src/fpnew_opgroup_multifmt_slice.sv
- src/fpnew_rounding.sv
- src/fpnew_top.sv
|
<reponame>diorga/snitch
package:
name: lowrisc_rv_plic
description: "lowRISC RISC-V Platform Level Interrupt Controller"
authors: ["lowRISC Contributors"]
dependencies:
lowrisc_prim: {path: ../prim}
sources:
- rtl/rv_plic_gateway.sv
- rtl/rv_plic_target.sv
|
udma_core:
files: [
rtl/core/udma_ch_addrgen.sv,
rtl/core/udma_filter_au.sv,
rtl/core/udma_filter_bincu.sv,
rtl/core/udma_filter_rx_dataout.sv,
rtl/core/udma_filter.sv,
rtl/core/udma_filter_tx_datafetch.sv,
rtl/core/udma_arbiter.sv,
rtl/core/udma_core.sv,
rtl/core/udma_rx_channels.sv,
rtl/core/udma_tx_channels.sv,
rtl/common/udma_ctrl.sv,
rtl/common/udma_apb_if.sv,
rtl/common/io_clk_gen.sv,
rtl/common/io_event_counter.sv,
rtl/common/io_generic_fifo.sv,
rtl/common/io_tx_fifo.sv,
rtl/common/io_tx_fifo_mark.sv,
rtl/common/io_tx_fifo_dc.sv,
rtl/common/io_shiftreg.sv,
rtl/common/udma_dc_fifo.sv,
rtl/common/udma_clkgen.sv,
rtl/common/udma_clk_div_cnt.sv,
]
|
<reponame>gokhankici/iodine
name: iodine
version: 0.1.0.0
github: "gokhankici/iodine"
license: MIT
author: "<NAME>"
maintainer: "<EMAIL>"
copyright: "2018 <NAME>"
extra-source-files:
- README.md
- ChangeLog.md
synopsis: Generates the transition relation & verification conditions from a Verilog IR
category: Hardware Verification
description: Please see the README on Github at <https://github.com/gokhankici/iodine#readme>
dependencies:
- base >= 4.7 && < 5
- array
- cmdargs
- containers
- directory
- filepath
- lens
- megaparsec
- mtl
- pretty
- process
- semigroups
- unordered-containers
- ansi-terminal
- liquid-fixpoint
- deepseq
- hashable
- fgl
- fgl-visualize
- cmdargs
- random
- text
- yaml
- aeson
- bytestring
library:
source-dirs: src
exposed-modules:
- Iodine.Runner
- Iodine.Utils
ghc-options:
- -Werror
- -O2
- -fdefer-type-errors
- -fwarn-incomplete-patterns
- -fwarn-incomplete-record-updates
- -fwarn-incomplete-uni-patterns
- -fwarn-missing-signatures
- -fwarn-name-shadowing
- -fwarn-orphans
- -fwarn-overlapping-patterns
- -fwarn-redundant-constraints
- -fwarn-tabs
- -fwarn-unused-binds
- -fwarn-unused-imports
- -fwarn-unused-matches
executables:
iodine:
main: Main.hs
source-dirs: app
ghc-options:
- -threaded
- -rtsopts
- -with-rtsopts=-N
- -Wall
- -Werror
- -O2
dependencies:
- iodine
tests:
iodine-test:
main: Test.hs
source-dirs: test
ghc-options:
- -threaded
- -rtsopts
- -with-rtsopts=-N
- -Wall
- -Werror
- -O2
dependencies:
- iodine
- hspec
- hspec-core
|
<reponame>mikey/microwatt<filename>.travis.yml
# global options
dist: xenial
os: linux
language: shell
services: docker
before_install: docker pull ghdl/vunit:llvm
env:
matrix:
- TASK="tests_unit"
- TASK="tests_console"
- TASK="; apt update && apt install -y python3-pexpect && make -j$(nproc) test_micropython test_micropython_long"
- TASK="{1..99}"
- TASK="{100..199}"
- TASK="{200..299}"
- TASK="{300..399}"
- TASK="{400..499}"
- TASK="{500..599}"
- TASK="{600..699}"
- TASK="{700..799}"
- TASK="{800..899}"
- TASK="{900..999}"
script: docker run -t -v `pwd`:/build -w /build ghdl/vunit:llvm bash -c "make -j$(nproc) $TASK"
stages:
- building
- test
jobs:
include:
- stage: building
env: NAME="Building core"
script: docker run -t -v `pwd`:/build -w /build ghdl/vunit:llvm bash -c "make GNATMAKE='gnatmake -j'$(nproc)"
|
<reponame>StanfordVLSI/dragonphy2
# Adapted from Garnet and ButterPHY
name: termination
commands:
- |
mkdir -p outputs
tar -xvf /home/sjkim85/dragonphy_tarballs/termination-latest.tar.gz -C outputs
mv outputs/termination-0.1.0/* outputs/
outputs:
- termination.lef
- termination.gds
- termination.spi
- termination.version
|
common_cells_all:
files: [
generic_fifo.sv,
generic_LFSR_8bit.sv,
onehot_to_bin.sv,
rstgen.sv,
edge_propagator_tx.sv,
edge_propagator_rx.sv,
edge_propagator.sv,
pulp_sync_wedge.sv,
pulp_sync.sv,
clock_divider.sv,
clock_divider_counter.sv,
]
|
<gh_stars>0
name: "sy_experiment_1"
data:
src: "de"
trg: "en"
train: "postediting/data/multi30k/train.lc.norm.tok"
dev: "postediting/data/multi30k/dev_2017_flickr.lc.norm.tok"
random_train_subset: 10000
level: "word"
lowercase: False
max_sent_length: 50
src_voc_min_freq: 1
trg_voc_min_freq: 1
testing:
beam_size: 5
alpha: 1.0
training:
load_model: "models/sy_transformer_copy_1/best.ckpt"
reset_best_ckpt: False
reset_scheduler: False
reset_optimizer: False
random_seed: 42
optimizer: "adam"
adam_betas: [0.9, 0.999]
scheduling: "plateau"
loss: "crossentropy"
learning_rate: 0.0002
learning_rate_min: 0.00000001
#learning_rate_factor: 2 # factor for Noam scheduler (default: 1)
learning_rate_warmup: 5000 # warmup steps for Noam scheduler
#label_smoothing: 0.1
weight_decay: 0.0
batch_size: 64
batch_type: "token"
eval_batch_size: 10 # mini-batch size for evaluation (see batch_size above)
eval_batch_type: "token" # evaluation batch type ("sentence", default) or tokens ("token")
epochs: 100
early_stopping_metric: "ppl"
validation_freq: 4000
logging_freq: 200
eval_metric: "token_accuracy"
model_dir: "models/sy_transformer_copy_1"
overwrite: True
shuffle: True
use_cuda: True
max_output_length: 50
print_valid_sents: [0, 1, 2]
keep_last_ckpts: 3
model:
initializer: "xavier" # initializer for all trainable weights (xavier, zeros, normal, uniform)
init_gain: 1.0 # gain for Xavier initializer (default: 1.0)
bias_initializer: "zeros" # initializer for bias terms (xavier, zeros, normal, uniform)
embed_initializer: "xavier" # initializer for embeddings (xavier, zeros, normal, uniform)
embed_init_gain: 1.0 # gain for Xavier initializer for embeddings (default: 1.0)
tied_embeddings: False # tie src and trg embeddings, only applicable if vocabularies are the same, default: False
tied_softmax: True
encoder:
type: "transformer"
num_layers: 2
num_heads: 4
embeddings:
embedding_dim: 64
scale: True
dropout: 0.
# typically ff_size = 4 x hidden_size
hidden_size: 64
ff_size: 256
dropout: 0.1
decoder:
type: "transformer"
num_layers: 2
num_heads: 4
embeddings:
embedding_dim: 64
scale: True
dropout: 0.
# typically ff_size = 4 x hidden_size
hidden_size: 64
ff_size: 256
dropout: 0.1
|
<filename>ultra96/ROOT_FS/app/fad/data/OdometryCalculator/PatternMatchingVOCalculator/PatternMatcher/AKAZEPatternMatcher/param.yaml
%YAML 1.2
---
AKAZE: # https://docs.opencv.org/3.4.2/d8/d30/classcv_1_1AKAZE.html
descriptor_type: 5 # 2:DESCRIPTOR_KAZE_UPRIGHT, 3:DESCRIPTOR_KAZE, 4:DESCRIPTOR_MLDB_UPRIGHT, 5:DESCRIPTOR_MLDB
descriptor_size: 0
descriptor_channels: 3
threshold: 0.00001
nOctaves: 4
nOctaveLayers: 4
diffusivity: 1 # DIFF_PM_G1 = 0, DIFF_PM_G2 = 1, DIFF_WEICKERT = 2, DIFF_CHARBONNIER = 3
Matcher:
DescriptorMatcherType: "BruteForce-Hamming"
NNDR: 0.8
matching_dist_thr: 0.60
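The AKAZE block above maps directly onto OpenCV's constructor arguments (see the linked docs). A minimal sketch of how such a file could be consumed, assuming OpenCV's Python bindings and PyYAML; the file name, variable names, and `match` helper are illustrative, not from the original project:
import cv2
import yaml

with open("param.yaml") as f:  # illustrative path
    cfg = yaml.safe_load(f)

a = cfg["AKAZE"]
detector = cv2.AKAZE_create(
    descriptor_type=a["descriptor_type"],          # 5 = DESCRIPTOR_MLDB
    descriptor_size=a["descriptor_size"],
    descriptor_channels=a["descriptor_channels"],
    threshold=a["threshold"],
    nOctaves=a["nOctaves"],
    nOctaveLayers=a["nOctaveLayers"],
    diffusivity=a["diffusivity"],                  # 1 = DIFF_PM_G2
)

m = cfg["Matcher"]
matcher = cv2.DescriptorMatcher_create(m["DescriptorMatcherType"])

def match(desc1, desc2):
    # Nearest-neighbor distance ratio (NNDR) test using the configured ratio.
    good = []
    for pair in matcher.knnMatch(desc1, desc2, k=2):
        if len(pair) == 2 and pair[0].distance < m["NNDR"] * pair[1].distance:
            good.append(pair[0])
    return good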
|
<gh_stars>100-1000
pip_test:
- src_wire: CLBLM_L_X8Y69/CLBLM_L_D3
dst_wire: SLICE_X11Y69.SLICEL/D3
pip_chain_test:
- wires:
- $CONSTANTS_X0Y0.$CONSTANTS/$GND_SOURCE
- $CONSTANTS_X0Y0/$GND_NODE
- TIEOFF_X9Y69.TIEOFF/$GND_SITE_WIRE
- TIEOFF_X9Y69.TIEOFF/HARD0GND_HARD0
- INT_L_X8Y69/GND_WIRE
- wires:
- $CONSTANTS_X0Y0.$CONSTANTS/$VCC_SOURCE
- $CONSTANTS_X0Y0/$VCC_NODE
- TIEOFF_X9Y69.TIEOFF/$VCC_SITE_WIRE
- TIEOFF_X9Y69.TIEOFF/HARD1VCC_HARD1
- INT_L_X8Y69/VCC_WIRE
- wires:
- $CONSTANTS_X0Y0.$CONSTANTS/$VCC_SOURCE
- $CONSTANTS_X0Y0/$VCC_NODE
- SLICE_X11Y69.SLICEL/$VCC_SITE_WIRE
- SLICE_X11Y69.SLICEL/CEUSEDVCC_HARD1
- wires:
- $CONSTANTS_X0Y0.$CONSTANTS/$GND_SOURCE
- $CONSTANTS_X0Y0/$GND_NODE
- SLICE_X11Y69.SLICEL/$GND_SITE_WIRE
- SLICE_X11Y69.SLICEL/SRUSEDGND_HARD0
bel_pin_test:
- bel: SLICE_X14Y63.SLICEL/D6LUT
pin: A3
wire: SLICE_X14Y63.SLICEL/D3
- bel: $CONSTANTS_X0Y0.$CONSTANTS/GND
pin: G
wire: $CONSTANTS_X0Y0.$CONSTANTS/$GND_SOURCE
- bel: $CONSTANTS_X0Y0.$CONSTANTS/VCC
pin: P
wire: $CONSTANTS_X0Y0.$CONSTANTS/$VCC_SOURCE
|
<filename>.github/workflows/documentation.yml<gh_stars>0
name: Documentation
on:
push:
branches:
- develop
pull_request:
jobs:
markdown-link-check:
name: 'Check Markdown links'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- uses: gaurav-nelson/github-action-markdown-link-check@v1
book:
name: Book
runs-on: ubuntu-latest
env:
MDBOOK_VERSION: '0.4.6'
steps:
- uses: actions/checkout@v2
- name: Install mdbook
run: |
mkdir -p $HOME/mdbook
curl -L https://github.com/rust-lang/mdBook/releases/download/v$MDBOOK_VERSION/mdbook-v$MDBOOK_VERSION-x86_64-unknown-linux-gnu.tar.gz | tar xz -C $HOME/mdbook
echo "${HOME}/mdbook/" >> $GITHUB_PATH
- name: Build
run: mdbook build
working-directory: docs
- uses: actions/upload-artifact@v2
with:
name: book
path: docs/book
cpp:
name: 'C++'
runs-on: ubuntu-latest
strategy:
matrix:
include:
- module: fletchgen
source: codegen/cpp/fletchgen
- module: runtime
source: runtime/cpp
steps:
- uses: actions/checkout@v2
- name: Install doxygen
run: sudo apt-get install -y doxygen
- name: Doxygen
run: doxygen
working-directory: ${{ matrix.source }}
- uses: actions/upload-artifact@v2
with:
name: ${{ matrix.module }}
path: ${{ matrix.source }}/docs/html
deploy:
name: Deploy
runs-on: ubuntu-latest
needs: [book, cpp]
if: github.event_name == 'push' && github.ref == 'refs/heads/develop'
steps:
- uses: actions/download-artifact@v2
with:
name: book
- uses: actions/download-artifact@v2
with:
name: fletchgen
path: api/fletchgen
- uses: actions/download-artifact@v2
with:
name: runtime
path: api/fletcher-cpp
- uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: .
|
<filename>ultra96/ROOT_FS/app/fad/data/OdometryCalculator/PatternMatchingVOCalculator/param.yaml
%YAML 1.2
---
param:
match_method: "TM" # "AKAZE", "TM"
line_area_thr: 0.040 # area threshold (m^2) for accepting a region as white line
cutting_size_of_ref_map: 1.70 # side length of the square cut from the reference map during matching
pattern_matcher_delta_threshord: 0.25 # allowable deviation of corresponding points (in meters)
cutting_left_ratio_of_pattern_image: 0.15 # fraction of the road-surface image cut off on the left
cutting_right_ratio_of_pattern_image: 0.15 # fraction of the road-surface image cut off on the right
cutting_above_ratio_of_pattern_image: 0.00 # fraction of the road-surface image cut off at the top
scale_ratio: 0.90 # scaling ratio of the reference map and road-surface image during matching
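The three cutting ratios above trim the road-surface image before matching. A minimal sketch of what they mean, assuming a NumPy image array; the helper is hypothetical and not part of the original pipeline:
import numpy as np

def crop_pattern_image(img: np.ndarray, p: dict) -> np.ndarray:
    # Hypothetical helper: removes the configured left/right/top fractions
    # of the road-surface image, as the comments above describe.
    h, w = img.shape[:2]
    left = int(w * p["cutting_left_ratio_of_pattern_image"])
    right = int(w * p["cutting_right_ratio_of_pattern_image"])
    top = int(h * p["cutting_above_ratio_of_pattern_image"])
    return img[top:, left:w - right]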
|
<reponame>AltisourceLabs/ecloudmanager<gh_stars>0
controllerPackage: org.ecloudmanager.rest
modelPackage: org.ecloudmanager.node.model
swaggerUrl: swagger.yaml
environment: development
entityProcessors:
- json
- xml
- yaml
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: UNet(2D) for segmentation on ChaosCT.
input size: 512*512
float ops: 23.3G
task: segmentation
framework: pytorch
prune: 'no'
version: 2.0
files:
- name: pt_unet_chaos-CT_512_512_23.3G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_unet_chaos-CT_512_512_23.3G_2.0.zip
checksum: 94391f7b2c5b0e3d59e3c70d90e16a67
- name: unet_chaos-CT_pt
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=unet_chaos-CT_pt-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: a5a5d886b87794a5d650360dcbc338bb
- name: unet_chaos-CT_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=unet_chaos-CT_pt-vck190-r2.0.0.tar.gz
checksum: 52c72a847efdf3255927efdb481df8a1
- name: unet_chaos-CT_pt
type: xmodel
board: vck50006pe-DPUCVDX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=unet_chaos-CT_pt-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
checksum: bd26d58637f4400916b181ecb1fdd65d
- name: unet_chaos-CT_pt
type: xmodel
board: vck50008pe-DPUCVDX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=unet_chaos-CT_pt-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz
checksum: 2b46f47af58719a626ab369a5126f2aa
- name: unet_chaos-CT_pt
type: xmodel
board: u50lv-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=unet_chaos-CT_pt-u50lv-DPUCAHX8H-r2.0.0.tar.gz
checksum: e74edc4a3f9715fc2e3daa5b9b61dc8c
- name: unet_chaos-CT_pt
type: xmodel
board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=unet_chaos-CT_pt-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
checksum: ed28522740ea1670b57ea6d211e8017f
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
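The checksum fields above make each download verifiable; judging by their 32 hex digits they appear to be MD5 digests, though that is an assumption. A minimal verification sketch (helper name and usage are illustrative):
import hashlib
import urllib.request

def verify_download(url: str, expected_md5: str, dest: str) -> bool:
    # Hypothetical helper: fetch one archive from its `download link` and
    # compare its digest against the `checksum` field (assumed MD5).
    urllib.request.urlretrieve(url, dest)
    with open(dest, "rb") as f:
        return hashlib.md5(f.read()).hexdigest() == expected_md5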
|
before_script:
- export CXX=g++-7 CC=gcc-7
# paths to local or network installations (the riscv toolchain and
# verilator are not built in the ci job as in travis)
- export QUESTASIM_HOME=
- export QUESTASIM_VERSION=
- export QUESTASIM_FLAGS=
- export RISCV=/scratch/$USER/projects/riscv_install
- export VERILATOR_ROOT=/scratch/$USER/projects/verilator-3.924
# setup dependent paths
- export PATH=${RISCV}/bin:$VERILATOR_ROOT/bin:${PATH}
- export LIBRARY_PATH=$RISCV/lib
- export LD_LIBRARY_PATH=$RISCV/lib
- export C_INCLUDE_PATH=$RISCV/include:$VERILATOR_ROOT/include
- export CPLUS_INCLUDE_PATH=$RISCV/include:$VERILATOR_ROOT/include
# number of parallel jobs to use for make commands and simulation
- export NUM_JOBS=4
- ci/make-tmp.sh
- git submodule update --init --recursive
variables:
GIT_SUBMODULE_STRATEGY: recursive
stages:
- build
- test_std
# prepare
build:
stage: build
script:
- ci/build-riscv-tests.sh
- ci/get-torture.sh
- make clean
- make torture-gen
artifacts:
paths:
- tmp
# rv64ui-p-* and rv64ui-v-* tests
run-asm-tests-questa:
stage: test_std
script:
- make -j${NUM_JOBS} run-asm-tests
dependencies:
- build
run-benchmarks-questa:
stage: test_std
script:
- make -j${NUM_JOBS} run-benchmarks
dependencies:
- build
# rv64ui-p-* tests
run-asm-tests1-verilator:
stage: test_std
script:
- make -j${NUM_JOBS} run-asm-tests1-verilator
dependencies:
- build
# rv64ui-v-* tests
run-asm-tests2-verilator:
stage: test_std
script:
- make -j${NUM_JOBS} run-asm-tests2-verilator
dependencies:
- build
run-benchmarks-verilator:
stage: test_std
script:
- make -j${NUM_JOBS} run-benchmarks-verilator
dependencies:
- build
torture:
stage: test_std
script:
- make torture-rtest
- make torture-rtest-verilator
dependencies:
- build
|
<reponame>icgrp/doblink
# @package _global_
do_blink:
backend: vivado
figure: fig1
sub_figure: b
device: 19eg
part: xczu19eg-ffvb1517-2-e
num_luts: 2400
bft: bft8
use_abs: True
|
name: Regression
on:
push:
pull_request:
schedule:
- cron: 0 11 * * ?
jobs:
linux:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Python 3.7
uses: actions/setup-python@v1
with:
python-version: 3.7
- name: Update pip
run: python -m pip install --upgrade pip
- name: Install dependencies
run: sudo apt-get install g++-7 libgmp-dev libmpfr-dev libmpc-dev iverilog verilator
- name: Run regression test
env:
CC: gcc-7
CXX: g++-7
run: source regress.sh
- name: Upload coverage
uses: codecov/codecov-action@v1.0.5
with:
token: ${{ secrets.CODECOV_TOKEN }}
mac:
runs-on: macOS-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Python 3.7
uses: actions/setup-python@v1
with:
python-version: 3.7
- name: Update pip
run: python -m pip install --upgrade pip
- name: Install dependencies
run: |
brew install icarus-verilog verilator wget coreutils
- name: Run regression test
run: source regress.sh
- name: Upload coverage
uses: codecov/codecov-action@v1.0.5
with:
token: ${{ secrets.CODECOV_TOKEN }}
|
<filename>Bender.yml
package:
name: "ntx"
version: "1.0.0"
authors:
- <NAME> <<EMAIL>>
- <NAME> <<EMAIL>>
sources:
- src/ntx_tools_pkg.vhd
- src/ntx_lzc.vhd
- src/ntx_fifo_asic.vhd
- src/ntx_fifo_asic_oregs.vhd
- src/ntx_fifo_d1.vhd
- src/ntx_fifo_d2.vhd
- src/ntx_fifo.vhd
- src/fp32_pkg.vhd
- src/fp32_pcsAdd.vhd
- src/fp32_pcsAdd2.vhd
- src/fp32_pcsCarryProp.vhd
- src/fp32_pcsMult.vhd
- src/fp32_norm.vhd
- src/fp32_mac.vhd
- src/fp32_toPcs.vhd
- src/ntx_pkg.vhd
- src/ntx_ctrl.vhd
- src/ntx_dag.vhd
- src/ntx_fifo_fpga.vhd
- src/ntx_fpu_alu.vhd
- src/ntx_fpu.vhd
- src/ntx_regIf.vhd
- src/ntx.vhd
|
dist: xenial
language: minimal
services:
- docker
before_install:
- docker pull tilk/riscv-gnu-toolchain
- docker run --name rv -t -d tilk/riscv-gnu-toolchain /bin/bash
- docker exec rv apt-get update
- docker exec rv apt-get install -y verilator g++
- docker cp . rv:/root/rv
script:
- docker exec --env PATH=/root/riscv-prefix/bin:/usr/bin:/bin rv bash -c "cd rv/tests && make"
- docker exec rv bash -c "cd rv/testbench; make"
# - rm -rf tests
# - docker cp rv:/root/tests tests
# - verilator --version
# - cd testbench; make
|
---
input_file : ../akane/02_word_package.akd
output_file : 02_word_package.md
image_url :
"Fig.1 ワードの構成" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/35e6911d-d654-1426-bd0a-ba5bb52b2642.jpeg"
"Fig.2 BITS Field" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/180d510e-2979-9238-84d8-9919b998ea0c.jpeg"
"Fig.3 DATA_BITS DATA_LO DATA_HI Field" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/8bbd5209-1990-c7a1-ac1f-b21a71e59403.jpeg"
"Fig.4 DATA_COMP_HI DATA_COMP_LO Field" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/13d8b912-5850-623a-440f-e8ec8293e2e8.jpeg"
"Fig.5 ATRB_BITS ATRB_LO ATRB_HI Field" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/382ad1e7-8a57-5285-1268-743a583ae5b2.jpeg"
"Fig.6 ATRB_NONE_POS Field" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/eeda5918-fe2b-ad80-45a9-4a6c1fba162d.jpeg"
"Fig.7 ATRB_PRIORITY_POS Field" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/dfe820d8-16a1-53e8-5482-ab947064e9c8.jpeg"
"Fig.8 ATRB_POSTPEND_POS Field" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/6faae68e-b772-78cc-588f-a3c4e1fffccf.jpeg"
link_list :
- id : "「はじめに」"
title: "「VHDL で書くマージソーター(はじめに)」@Qiita"
url : "https://qiita.com/ikwzm/items/6665b2ef44d878a5b85f"
- id : "「ワードの定義」"
title: "「VHDL で書くマージソーター(ワードの定義)」@Qiita"
url : "https://qiita.com/ikwzm/items/bdcd8876317b908ff492"
- id : "「ワード比較器」"
title: "「VHDL で書くマージソーター(ワード比較器)」@Qiita"
url : "https://qiita.com/ikwzm/items/d5d1dd264b1670f33bd7"
- id : "「ソーティングネットワーク」"
title: "「VHDL で書くマージソーター(ソーティングネットワーク)」@Qiita"
url : "https://qiita.com/ikwzm/items/a1d06e47523759c726ae"
- id : "「バイトニックマージソート」"
title: "「VHDL で書くマージソーター(バイトニックマージソート)」@Qiita"
url : "https://qiita.com/ikwzm/items/366eacbf6a877994c955"
- id : "「バッチャー奇偶マージソート」"
title: "「VHDL で書くマージソーター(バッチャー奇偶マージソート)」@Qiita"
url : "https://qiita.com/ikwzm/items/c21a53f21b87408a7805"
- id : "「シングルワード マージソート ノード」"
title: "「VHDL で書くマージソーター(シングルワード マージソート ノード)」@Qiita"
url : "https://qiita.com/ikwzm/items/7fd7ef9ffc4d9b314fee"
- id : "「マルチワード マージソート ノード」"
title: "「VHDL で書くマージソーター(マルチワード マージソート ノード)」@Qiita"
url : "https://qiita.com/ikwzm/items/ed96b7a44b83bcee4ba5"
- id : "「マージソート ツリー」"
title: "「VHDL で書くマージソーター(マージソート ツリー)」@Qiita"
url : "https://qiita.com/ikwzm/items/1f76ae5cda95aaf92501"
- id : "「端数ワード処理」"
title: "「VHDL で書くマージソーター(端数ワード処理)」@Qiita"
url : "https://qiita.com/ikwzm/items/6b15340f1e05ef03f8d0"
- id : "「ストリーム入力」"
title: "「VHDL で書くマージソーター(ストリーム入力)」@Qiita"
url : "https://qiita.com/ikwzm/items/56e22511021a082a2ccd"
- id : "「ストリームフィードバック」"
title: "「VHDL で書くマージソーター(ストリームフィードバック)」@Qiita"
url : "https://qiita.com/ikwzm/items/e8c59c0ec92956c9355f"
- id : "「ArgSort IP」"
title: "「VHDL で書くマージソーター(ArgSort IP)」@Qiita"
url : "https://qiita.com/ikwzm/items/89fc9542492fca74c9e3"
- id : "「ArgSort-Ultra96」"
title: "「VHDL で書くマージソーター(ArgSort-Ultra96)」@Qiita"
url : "https://qiita.com/ikwzm/items/d58c9b77d038e23ac792"
- id : "「ArgSort-Kv260」"
title: "「VHDL で書くマージソーター(ArgSort-Kv260)」@Qiita"
url : "https://qiita.com/ikwzm/items/ec0f779534c44b35334a"
seg_level : -1
|
name: Implementation
on:
push:
pull_request:
schedule:
- cron: '0 0 * * 5'
workflow_dispatch:
jobs:
all-in-one:
runs-on: ubuntu-latest
name: '🛳️ All-in-one'
steps:
- name: '🧰 Repository Checkout'
uses: actions/checkout@v2
- name: '⚙️ Generate bitstream'
uses: docker://ghcr.io/hdl/debian-buster/impl
with:
args: make -C boards/UPduino_v3_ghdl-yosys-nextpnr/ all
- name: '📤 Upload Artifact: bitstream'
uses: actions/upload-artifact@v2
with:
name: UPduino_v3_ghdl-yosys-nextpr
path: |
boards/UPduino_v3_ghdl-yosys-nextpnr/neorv32_upduino_v3_impl_1.bin
boards/UPduino_v3_ghdl-yosys-nextpnr/yosys-report.txt
boards/UPduino_v3_ghdl-yosys-nextpnr/nextpnr-report.txt
|
# This is an example of how an IP (here PWM_CAPTURE) is used in our yaml
PWM_CAPTURE:
IP: __PWM_CAPTURE
CMD: CAP
pwm_input:
PIN: A34
DIRECTION: in
rs232_tx:
PIN: A35
DIRECTION: out
|
on: [workflow_dispatch, push]
name: 'Documentation'
jobs:
docs_test_job:
timeout-minutes: 10
runs-on: ubuntu-latest
name: 'Test documentation build'
steps:
- uses: actions/checkout@v2
- name: Install Dependencies
run: |
sudo apt-get update
sudo apt-get install graphviz
- run: |
python3 -m venv $GITHUB_WORKSPACE/clean_env --clear
source $GITHUB_WORKSPACE/clean_env/bin/activate
echo $VIRTUAL_ENV
pip3 install -r $GITHUB_WORKSPACE/requirements.txt -e $GITHUB_WORKSPACE/.
cd docs
make html
|
- Global:
Print : true
- Library:
Name : merge_sorter
Format : "add_vhdl_file sources_1 #{library_name} #{file_name}"
PathList : ["../../../src/main/vhdl"]
- Library:
Name : pipework
Format : "add_vhdl_file sources_1 #{library_name} #{file_name}"
PathList : ["../../../PipeWork/src/"]
- Library:
Name : dummy_plug
Format : "add_vhdl_file sim_1 #{library_name} #{file_name}"
Exclude : ["../../../Dummy_Plug/src/main/vhdl/core/sync_alt.vhd"]
PathList : ["../../../Dummy_Plug/src/main/vhdl/"]
- Library:
Name : work
Format : "add_vhdl_file sim_1 #{library_name} #{file_name}"
PathList : ["../../../src/test/vhdl"]
Top : ["OddEven_Sorter_Test_Bench"]
|
# if trusty is not used, compilation is killed (out of memory)
sudo: required
dist: trusty
language: python
git:
depth: 1
cache:
apt: true
# clang is required because there is not enough RAM for gcc
compiler: gcc
python:
- "3.5"
addons:
apt:
sources:
- ubuntu-toolchain-r-test
#- llvm-toolchain-precise-3.9
packages:
- g++-5
#- clang-3.9
#env: CC=clang-3.9 CXX=clang-3.9
# we have to add -fpermissive otherwise pillow will not compile
env: CC=gcc-5 CXX=g++-5 CFLAGS=-fpermissive
# command to install dependencies
install:
- cd ..
- pip3 install coveralls
- git clone https://github.com/Nic30/HWToolkit.git
- git clone https://github.com/Nic30/hwtLib.git
- git clone https://github.com/Nic30/hdlConvertor.git
- cd HWToolkit
- python3 setup.py install
- cd ../hwtLib
- python3 setup.py install
- cd ../hdlConvertor
- python3 setup.py install
- cd ../hwtHdlParsers
- python3 setup.py install
# command to run tests
script:
- python3 -m pytest hwtHdlParsers/tests/all.py --doctest-modules -v
after_success:
- coveralls
|
# To use:
#
# pre-commit run -a
#
# Or:
#
# pre-commit install # (runs every time you commit in git)
#
# To update this file:
#
# pre-commit autoupdate
#
# See https://github.com/pre-commit/pre-commit
repos:
# Standard hooks
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v3.2.0
hooks:
- id: check-added-large-files
- id: check-case-conflict
- id: check-merge-conflict
- id: check-symlinks
- id: check-yaml
- id: debug-statements
- id: end-of-file-fixer
- id: mixed-line-ending
- id: requirements-txt-fixer
- id: trailing-whitespace
- id: fix-encoding-pragma
# Black, the code formatter, natively supports pre-commit
- repo: https://github.com/psf/black
rev: 20.8b1
hooks:
- id: black
# By default, this ignores pyi files, though black supports them
types: [text]
files: \.pyi?$
# Changes tabs to spaces
- repo: https://github.com/Lucas-C/pre-commit-hooks
rev: v1.1.9
hooks:
- id: remove-tabs
# Flake8 also supports pre-commit natively (same author)
- repo: https://gitlab.com/pycqa/flake8
rev: 3.8.3
hooks:
- id: flake8
additional_dependencies: [flake8-bugbear, pep8-naming]
exclude: ^(docs/.*|tools/.*)$
# CMake formatting
- repo: https://github.com/cheshirekow/cmake-format-precommit
rev: v0.6.13
hooks:
- id: cmake-format
additional_dependencies: [pyyaml]
types: [file]
files: (\.cmake|CMakeLists.txt)(.in)?$
# Check static types with mypy
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v0.790
hooks:
- id: mypy
# The default Python type ignores .pyi files, so let's rerun if detected
types: [text]
files: ^pybind11.*\.pyi?$
# Running per-file misbehaves a bit, so just run on all files, it's fast
pass_filenames: false
# Checks the manifest for missing files (native support)
- repo: https://github.com/mgedmin/check-manifest
rev: "0.43"
hooks:
- id: check-manifest
# This is a slow hook, so only run this if --hook-stage manual is passed
stages: [manual]
additional_dependencies: [cmake, ninja]
# The original pybind11 checks for a few C++ style items
- repo: local
hooks:
- id: disallow-caps
name: Disallow improper capitalization
language: pygrep
entry: PyBind|Numpy|Cmake|CCache
exclude: .pre-commit-config.yaml
- repo: local
hooks:
- id: check-style
name: Classic check-style
language: system
types:
- c++
entry: ./tools/check-style.sh
|
hosts:
# All fields in a host object are optional!
# Interfaces can be defined by connections from switches
# The following is the simplest host:
h1 : {}
# Hosts can optionally define interfaces
h2 :
interfaces:
- ip: 10.0.0.11/24
# _MUST_ put the mac address in quotes if it consists only of numerals,
# otherwise yaml parses it as a base-60 number (see the sketch at the end of this file).
mac: '11:11:11:11:11:11'
# You can set the name of the interface here
# Relevant mostly for replaying or sending packets on that interface
name: 'IAmAnInterface'
# First interface without a specified port is set
# to the default port, port: 0
- ip: 192.168.3.11/8
port: 1 # The port can also be specified explicitly
link: s3 # If it is a non-0 port, link must be specified explicitly
# All nodes must be connected _somehow_, so if no interfaces are defined here,
# a switch must define a link to this node
h3 :
# Hosts can also have programs specified to run on startup
# These programs can also be specified from the command line with:
# --host-prog "h2:echo 'Hello from host 2'"
programs:
- "echo 'Hello from host 2'"
switches:
# Each switch MUST specify a cfg file pointing to a valid json,
# compiled with p4c-bm2-ss
s1:
# cfg is a required field
# (all paths are relative to this yaml file)
cfg: ../../build/bmv2/Complete.json
# Switch interfaces function the same as hosts, except
# that the "link" field is not optional
interfaces:
- link: h1
ip: 10.1.2.3
# This MAC address doesn't consist only of numerals, so there is
# no need to quote it; YAML wouldn't treat it as a number.
mac: FF:FF:FF:FF:EE:DD
- link: h2
- link: s2
port: 5 # Port can be specified explicitly. Otherwise,
# it will be set to the index of the iface in the list
# Specification of commands to be sent to switch from controller
cmds:
# Commands can be specified as a file, containing
# newline-delimited lists of commands:
- ../controller_files/compressor_commands.txt
# Or verbatim:
- table_add forward set_egress 0 => 1
- table_add forward set_egress 1 => 0
s2:
cfg: ../../build/bmv2/Dropper.json
# If interfaces are not specified, they will be inferred
# from connecting switches.
# This can potentially obfuscate port numbers, however
# "replay" specifies pcap files to be tcpreplay'd from
# this switch to the specified switches
# (e.g. this sends the lldp_* file from s2 to s1 & s3)
replay:
s1: ../pcaps/lldp_enable_fec.pcap
s3: ../pcaps/lldp_enable_fec.pcap
# This is simply a different format for yaml lists. Take your pick
cmds: [../controller_files/forwarding_commands.txt]
s3:
cfg: ../../build/bmv2/Complete.json
interfaces:
- link: s2
- link: h2
- link: h3 # Must link to h3 somewhere, because it doesn't self-define links
cmds:
- ../controller_files/decompressor_commands.txt
- ../controller_files/forwarding_commands.txt
- ../controller_files/fec_encoder_commands.txt
- ../controller_files/fec_decoder_commands.txt
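The MAC-quoting caveat flagged twice above comes from YAML 1.1, which resolves colon-separated digit groups as base-60 (sexagesimal) integers. A small PyYAML demonstration with illustrative keys:
import yaml

doc = yaml.safe_load(
    "unquoted: 11:11:11:11:11:11\n"
    "quoted: '11:11:11:11:11:11'\n"
)
print(type(doc["unquoted"]))  # <class 'int'> -- silently parsed as base-60
print(type(doc["quoted"]))    # <class 'str'> -- survives as a MAC address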
|
docs_list_title: documents
docs:
- title: User Manual
url: ./pdf/RFSoC_2x2_UG.pdf
- title: Board schematics
url: ./pdf/HTG-ZRF2-XUP_REV_11_Schematic_20Jan21.pdf
|
dist: bionic
language: C
compiler: gcc
git:
depth: 1
before_install:
- export INSTALL_DIR=~/symbiflow
- SRC=https://github.com/QuickLogic-Corp/quicklogic-fpga-toolchain/releases/download/v1.2.0/Symbiflow_v1.2.0.gz.run
- RUNFILE=$(basename $SRC)
- echo "retrive new file"
- wget $SRC
- echo 'y' | bash $RUNFILE
- export PATH="$INSTALL_DIR/install/bin:$INSTALL_DIR/install/bin/python:$PATH"
- source "$INSTALL_DIR/conda/etc/profile.d/conda.sh"
- conda activate
jobs:
include:
-name: "build"
script:
bash ci/makeall.csh
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: PointPainting on nuScenes
task: Sensor-fusion with 2d semantic segmentation and 3d detection
framework: pytorch
prune: 'no'
version: 1.4
part 1: PointPillars of PointPainting
task 1: 3d detection for sensor fusion
input size 1: 40000*64*16
float ops 1: 112G
part 2: SemanticFPN of PointPainting
task 2: 2d semantic segmentation for sensor fusion
input size 2: 320*576*3
float ops 2: 14G
files:
- name: pt_pointpainting_nuscenes_1.4
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_pointpainting_nuscenes_1.4.zip
checksum: 193a48f6db925a8dbfc990f5becf4290
- name: pointpainting_nuscenes_40000_64_0_pt
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=pointpainting_nuscenes_40000_64_0_pt-zcu102_zcu104_kv260-r1.4.1.tar.gz
checksum: ac3904eee9c931f29d316f53d82d17d0
- name: pointpainting_nuscenes_40000_64_1_pt
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=pointpainting_nuscenes_40000_64_1_pt-zcu102_zcu104_kv260-r1.4.1.tar.gz
checksum: a019a7f4a70ad582cf1ba7c205382803
- name: semanticfpn_nuimage_576_320_pt
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=semanticfpn_nuimage_576_320_pt-zcu102_zcu104_kv260-r1.4.1.tar.gz
checksum: c97d4f7e52844ef6511b040553ba631f
- name: pointpainting_nuscenes_40000_64_0_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=pointpainting_nuscenes_40000_64_0_pt-vck190-r1.4.1.tar.gz
checksum: d088440d08cef616a58d892e15254b39
- name: pointpainting_nuscenes_40000_64_1_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=pointpainting_nuscenes_40000_64_1_pt-vck190-r1.4.1.tar.gz
checksum: 4d1ef36d6f5fc897e314859a2c0166ef
- name: semanticfpn_nuimage_576_320_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=semanticfpn_nuimage_576_320_pt-vck190-r1.4.1.tar.gz
checksum: 747c2c7b356de2aa974b08fc036f8f70
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
dist: xenial
language: cpp
env:
global:
# versions
- ARROW_VERSION=0.13.0
- AWS_FPGA_VERSION=1.4.8
- CAPI_SNAP_VERSION=1.5.1
- CAPI_PSLSE_VERSION=4.1
- PSLVER=8
- MDBOOK_VERSION=0.2.1
# fletcher
- FLETCHER_CPP=0
- FLETCHER_ECHO=0
- FLETCHER_AWS=0
- FLETCHER_SNAP=0
- FLETCHER_GEN=0
- FLETCHER_PYTHON=0
- FLETCHER_TESTS=0
- FLETCHER_DOCS=0
- SOURCE_PATH=
# cmake
- CTEST_OUTPUT_ON_FAILURE=1
# gcc
- MATRIX_EVAL="CC=gcc-7 && CXX=g++-7"
matrix:
include:
# all
- name: "[C++] Fletcher"
env: FLETCHER_CPP=1 FLETCHER_ECHO=1 FLETCHER_AWS=1 FLETCHER_SNAP=1 FLETCHER_GEN=1
# common
- name: "[C++] Common"
env: SOURCE_PATH=common/cpp FLETCHER_TESTS=1
# runtime
- name: "[C++] Runtime"
env: SOURCE_PATH=runtime/cpp FLETCHER_TESTS=1
# - name: "[Python] pyfletcher"
# env: FLETCHER_CPP=1 FLETCHER_ECHO=1 FLETCHER_PYTHON=1
# codegen
- name: "[C++] Fletchgen"
env: SOURCE_PATH=codegen/fletchgen
# snap platform
- name: "[C++] Snap platform"
env: SOURCE_PATH=platforms/snap/runtime FLETCHER_SNAP=1
# aws platform
- name: "[C++] AWS-F1 platform"
env: SOURCE_PATH=platforms/aws-f1/runtime FLETCHER_AWS=1
# echo platform
- name: "[C++] Echo platform"
env: SOURCE_PATH=platforms/echo/runtime
- name: "Docs"
env: FLETCHER_DOCS=1
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-7
before_install:
- curl -sSL "https://dist.apache.org/repos/dist/dev/arrow/KEYS" | sudo -E apt-key add -
- echo "deb [arch=amd64] https://dl.bintray.com/apache/arrow/ubuntu/ xenial main" | sudo tee -a /etc/apt/sources.list
- sudo apt-get update -qq
- sudo apt-get install -yq libarrow-dev=${ARROW_VERSION}-1
- eval "${MATRIX_EVAL}"
script:
- |
if [ $FLETCHER_PYTHON -eq 1 ]; then
curl "https://bootstrap.pypa.io/get-pip.py" | sudo python3 &&
sudo pip install cython numpy pyarrow==$ARROW_VERSION &&
export PYARROW_DIR=`python3 -c "import pyarrow as pa; print(pa.get_library_dirs()[0])"`
fi
- |
if [ $FLETCHER_AWS -eq 1 ]; then
git clone --single-branch --depth 1 --branch v$AWS_FPGA_VERSION https://github.com/aws/aws-fpga &&
pushd aws-fpga &&
source sdk_setup.sh &&
popd
fi
- |
if [ $FLETCHER_SNAP -eq 1 ]; then
git clone --single-branch --depth 1 --branch v$CAPI_PSLSE_VERSION https://github.com/ibm-capi/pslse &&
pushd pslse &&
export PSLSE_ROOT=`pwd` &&
popd &&
git clone --single-branch --depth 1 --branch v$CAPI_SNAP_VERSION https://github.com/open-power/snap &&
pushd snap &&
export SNAP_ROOT=`pwd` &&
BUILD_SIMCODE=1 make software &&
popd
fi
- mkdir -p build
- pushd build
- cmake
-DFLETCHER_CPP=$FLETCHER_CPP
-DFLETCHER_ECHO=$FLETCHER_ECHO
-DFLETCHER_AWS=$FLETCHER_AWS
-DFLETCHER_SNAP=$FLETCHER_SNAP
-DFLETCHER_GEN=$FLETCHER_GEN
-DFLETCHER_PYTHON=$FLETCHER_PYTHON
-DPYARROW_DIR=$PYARROW_DIR
-DFLETCHER_TESTS=$FLETCHER_TESTS
../$SOURCE_PATH
- make -j
- |
if [ $FLETCHER_TESTS -eq 1 ]; then
make test
fi
- popd
- |
if [ $FLETCHER_PYTHON -eq 1 ]; then
pushd build &&
sudo make install &&
popd &&
pushd runtime/python &&
sudo python3 setup.py install &&
sudo ldconfig &&
python3 testing/test.py &&
popd
fi
before_deploy:
- pushd docs
- tar xvfz <(curl -sL https://github.com/rust-lang-nursery/mdBook/releases/download/v$MDBOOK_VERSION/mdbook-v$MDBOOK_VERSION-x86_64-unknown-linux-gnu.tar.gz)
- ./mdbook build
deploy:
provider: pages
skip-cleanup: true
github-token: $GITHUB_TOKEN
local-dir: docs/book
keep-history: false
on:
branch: develop
condition: $FLETCHER_DOCS = 1
|
<gh_stars>0
language: c
cache:
apt: true
os:
- linux
dist: trusty
addons:
apt:
sources:
- sourceline: ppa:saltmakrell/ppa
packages:
- make
- iverilog
- yosys
env:
- SYNTH_KW=
- SYNTH_KW=synth_
before_script:
- make --no-print-directory ${SYNTH_KW}build
script:
- make --no-print-directory --keep-going ${SYNTH_KW}test
|
<filename>stack.yaml
resolver: lts-6.18
packages:
- '.'
extra-deps:
- netlist-0.3.1
- netlist-to-vhdl-0.3.2
- sized-types-0.5.1
flags: {}
extra-package-dbs: []
|
package:
name: scm
sources:
- src/latch_register_file_1r_1w.sv
- src/latch_register_file_1r_1w_all.sv
- src/latch_register_file_1r_1w_test_wrap.sv
- src/latch_register_file_1r_1w_all_test_wrap.sv
- src/ff_register_file_1r_1w.sv
- src/ff_register_file_1r_1w_all.sv
- src/ff_register_file_1r_1w_test_wrap.sv
- src/ff_register_file_1r_1w_all_test_wrap.sv
- target: all(any(all(not(fpga), not(verilator)), scm_use_latch_scm), not(scm_use_fpga_scm))
files:
- latch_scm/register_file_1r_1w_test_wrap.sv
- latch_scm/register_file_1w_64b_multi_port_read_32b_1row.sv
- latch_scm/register_file_1w_multi_port_read_1row.sv
- latch_scm/register_file_1r_1w_all.sv
- latch_scm/register_file_1r_1w_all_test_wrap.sv
- latch_scm/register_file_1r_1w_be.sv
- latch_scm/register_file_1r_1w.sv
- latch_scm/register_file_1r_1w_1row.sv
- latch_scm/register_file_1w_128b_multi_port_read_32b.sv
- latch_scm/register_file_1w_64b_multi_port_read_32b.sv
- latch_scm/register_file_1w_64b_1r_32b.sv
- latch_scm/register_file_1w_multi_port_read_be.sv
- latch_scm/register_file_1w_multi_port_read.sv
- latch_scm/register_file_2r_1w_asymm.sv
- latch_scm/register_file_2r_1w_asymm_test_wrap.sv
- latch_scm/register_file_2r_2w.sv
- latch_scm/register_file_3r_2w.sv
- latch_scm/register_file_3r_2w_be.sv
- latch_scm/register_file_multi_way_1w_64b_multi_port_read_32b.sv
- latch_scm/register_file_multi_way_1w_multi_port_read.sv
- target: all(any(fpga, verilator, scm_use_fpga_scm), not(scm_use_latch_scm))
files:
- fpga_scm/register_file_1r_1w_all.sv
- fpga_scm/register_file_1r_1w_be.sv
- fpga_scm/register_file_1r_1w.sv
- fpga_scm/register_file_1r_1w_1row.sv
- fpga_scm/register_file_1r_1w_raw.sv
- fpga_scm/register_file_1w_multi_port_read.sv
- fpga_scm/register_file_1w_64b_multi_port_read_32b.sv
- fpga_scm/register_file_1w_64b_1r_32b.sv
- fpga_scm/register_file_2r_1w_asymm.sv
- fpga_scm/register_file_2r_1w_asymm_test_wrap.sv
- fpga_scm/register_file_2r_2w.sv
- fpga_scm/register_file_3r_2w.sv
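The target guards above are boolean expressions over the set of active build targets, combined with all/any/not: the first file group is chosen for latch-based memories, the second on FPGA or Verilator. A toy evaluator sketching those semantics (illustrative only; Bender implements this natively):
def split_args(s: str) -> list:
    # Split a comma-separated argument list, respecting nested parentheses.
    parts, depth, cur = [], 0, ""
    for ch in s:
        if ch == "," and depth == 0:
            parts.append(cur)
            cur = ""
        else:
            depth += ch == "("
            depth -= ch == ")"
            cur += ch
    parts.append(cur)
    return parts

def eval_target(expr: str, active: set) -> bool:
    expr = expr.strip()
    if expr.startswith("not(") and expr.endswith(")"):
        return not eval_target(expr[4:-1], active)
    for op, fn in (("all(", all), ("any(", any)):
        if expr.startswith(op) and expr.endswith(")"):
            return fn(eval_target(a, active) for a in split_args(expr[len(op):-1]))
    return expr in active  # a bare flag such as `fpga` or `verilator`

# The fpga_scm group is selected when, e.g., only `verilator` is active:
expr = "all(any(fpga, verilator, scm_use_fpga_scm), not(scm_use_latch_scm))"
print(eval_target(expr, {"verilator"}))  # True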
|
public_folder: ./content
upload_folder: ./content/uploads
database_type: postgresql
database_user: launchsip
database_password: <PASSWORD>
database_host: localhost
database_path: launchsip
|
# Copyright (C) 2019-2021 The SymbiFlow Authors.
#
# Use of this source code is governed by an ISC-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC
name: swerv
description: Full swerv core test
top_module: tb_top
tags: swerv
path: third_party/cores/swerv
command: fusesoc --cores-root third_party/cores/swerv run --target=sim --setup --build-root build/swerv_sim chipsalliance.org:cores:SweRV_EH1:1.8
conf_file: build/swerv_sim/sim-verilator/chipsalliance.org_cores_SweRV_EH1_1.8.vc
test_file: swerv-sim.sv
timeout: 180
compatible-runners: verilator-uhdm verilator slang
type: parsing elaboration simulation_without_run
|
<filename>src_files.yml
riscv_regfile_rtl:
targets: [
rtl,
tsmc55,
gf22,
]
incdirs: [
./rtl/include,
]
files: [
./rtl/register_file_test_wrap.sv,
./rtl/riscv_register_file.sv,
]
riscv:
vlog_opts: [
-L fpnew_lib,
]
incdirs: [
./rtl/include,
../../rtl/includes,
]
files: [
./rtl/include/apu_core_package.sv,
./rtl/include/riscv_defines.sv,
./rtl/include/riscv_ascon_defines.sv,
./rtl/include/riscv_tracer_defines.sv,
./rtl/riscv_alu.sv,
./rtl/riscv_alu_div.sv,
./rtl/riscv_ascon.sv,
./rtl/riscv_compressed_decoder.sv,
./rtl/riscv_controller.sv,
./rtl/riscv_cs_registers.sv,
./rtl/riscv_decoder.sv,
./rtl/riscv_int_controller.sv,
./rtl/riscv_ex_stage.sv,
./rtl/riscv_hwloop_controller.sv,
./rtl/riscv_hwloop_regs.sv,
./rtl/riscv_id_stage.sv,
./rtl/riscv_if_stage.sv,
./rtl/riscv_load_store_unit.sv,
./rtl/riscv_mult.sv,
./rtl/riscv_prefetch_buffer.sv,
./rtl/riscv_prefetch_L0_buffer.sv,
./rtl/riscv_core.sv,
./rtl/riscv_apu_disp.sv,
./rtl/riscv_fetch_fifo.sv,
./rtl/riscv_L0_buffer.sv,
./rtl/riscv_pmp.sv,
]
riscv_vip_rtl:
targets: [
rtl,
]
incdirs: [
./rtl/include,
]
files: [
./rtl/riscv_tracer.sv,
./rtl/cv32e40p_sim_clock_gate.sv,
]
flags: [
skip_synthesis,
]
riscv_regfile_verilator:
targets: [
verilator,
]
files: [
./rtl/riscv_register_file.sv,
]
riscv_regfile_fpga:
targets: [
xilinx,
]
incdirs: [
./rtl/include,
]
files: [
./rtl/register_file_test_wrap.sv,
./rtl/riscv_register_file.sv,
]
tb_riscv:
sim_tools: [
questa
]
synth_tools: [
mentor
]
targets: [
rtl,
]
flags: [
skip_synthesis,
]
incdirs: [
tb/tb_riscv/include,
rtl/include,
]
files: [
tb/tb_riscv/include/perturbation_defines.sv,
tb/tb_riscv/riscv_simchecker.sv,
tb/tb_riscv/tb_riscv_core.sv,
tb/tb_riscv/riscv_perturbation.sv,
tb/tb_riscv/riscv_random_interrupt_generator.sv,
tb/tb_riscv/riscv_random_stall.sv,
]
|
<gh_stars>1-10
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: face landmark model. This model outputs five points: left-eye-center,
  right-eye-center, nose, left-mouth-corner, and right-mouth-corner.
input size: 96*72
float ops: 0.14G
task: face keypoints detection
framework: caffe
prune: 'no'
version: 2.0
files:
- name: cf_landmark_celeba_96_72_0.14G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=cf_landmark_celeba_96_72_0.14G_2.0.zip
checksum: 5b3f5bd0840ce0289a3e8f562414672a
- name: face_landmark
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=face_landmark-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: 5d570e07dbacbaeac74428c1ad3041c0
- name: face_landmark
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=face_landmark-vck190-r2.0.0.tar.gz
checksum: a8ce18d7bb5e7ce6658d9cde2ad7a58c
- name: face_landmark
type: xmodel
board: vck50006pe-DPUCVDX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=face_landmark-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
checksum: 2fc3ef4e218760a4254bff0ff145be35
- name: face_landmark
type: xmodel
board: vck50008pe-DPUCVDX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=face_landmark-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz
checksum: d96081207e6eae8ab598cb8784f09911
- name: face_landmark
type: xmodel
board: u50lv-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=face_landmark-u50lv-DPUCAHX8H-r2.0.0.tar.gz
checksum: d6ab71de6539d627eb1e10e7bd373c35
- name: face_landmark
type: xmodel
board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=face_landmark-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
checksum: 45a57c7a7335324d955d30d6cda1f26b
- name: face_landmark
type: xmodel
board: u200-DPUCADF8H & u250-DPUCADF8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=face_landmark-u200-u250-r2.0.0.tar.gz
checksum: 69dd98e8ac333f152ee5e4dc9ce92817
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
<filename>Verilog Source Code/FoldedRule90/SEFUAM_SRAM_Channel_Count_Experiment/hdc_sensor_fusion_sim.yml
sim.inputs.top_module: "hdc_sensor_fusion"
sim.inputs.tb_dut: "dut"
sim.inputs.tb_name: "hdc_sensor_fusion_tb"
sim.inputs.input_files_meta: "append"
sim.inputs.input_files:
- "src/SEFUAM_SRAM_Channel_Count_Experiment/hdc_sensor_fusion.sv"
- "src/SEFUAM_SRAM_Channel_Count_Experiment/hdc_sensor_fusion_tb.sv"
- "src/SEFUAM_SRAM_Channel_Count_Experiment/associative_memory.sv"
- "src/SEFUAM_SRAM_Channel_Count_Experiment/hv_binary_adder.sv"
- "src/SEFUAM_SRAM_Channel_Count_Experiment/fuser.sv"
- "src/SEFUAM_SRAM_Channel_Count_Experiment/spatial_encoder.sv"
- "src/SEFUAM_SRAM_Channel_Count_Experiment/temporal_encoder.sv"
- "src/SEFUAM_SRAM_Channel_Count_Experiment/hv_generator_serial_circular.sv"
- "src/SEFUAM_SRAM_Channel_Count_Experiment/memory_wrapper.sv"
sim.inputs:
timescale: "1ns/1ps"
options:
- "-notice"
- "-line"
- "-debug_pp"
- "-debug_all"
- "+v2k"
- "+lint=all,noVCDE"
- "+incdir+../../src/SEFUAM_SRAM_Channel_Count_Experiment"
- "+define+CLOCK_PERIOD=1100"
- "-sverilog"
execute_sim: true
execution_flags: ["+verbose=1"]
|
name: EggNetExtension
on: [push]
jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]
# os: [ubuntu-latest, macOS-latest]
python-version: [3.6, 3.7, 3.8]
steps:
- uses: actions/checkout@v1
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
cd python
python -m pip install --upgrade pip
pip install -r requirements.txt
pip install pytest tox
- name: Install SWIG (Linux)
if: runner.os == 'Linux'
run: |
# sudo apt-get install swig
mkdir swig
cd swig
wget http://prdownloads.sourceforge.net/swig/swig-4.0.1.tar.gz
tar -xf swig-4.0.1.tar.gz
cd swig-4.0.1
./configure
make
sudo make install
- name: Install SWIG (Mac)
if: runner.os == 'macOS'
run: brew install swig
- name: Install SWIG (Win)
if: runner.os == 'Windows'
run: choco install swig
- name: Build & Test with Pytest
if: runner.os != 'Windows'
run: |
cd python/EggNetExtension
python setup.py build_ext --build-lib .
python setup.py build_ext --inplace
python setup.py build
python setup.py sdist bdist_wheel
# tox
# pytest
- name: Test with pytest (Windows)
if: runner.os == 'Windows'
run: |
cd python/EggNetExtension
# Use the mingw32 compiler because MSVC does not fully implement the C standard
# python setup.py build_ext --global-option --compiler=mingw32 --build-lib .
python setup.py build
python setup.py build_ext sdist bdist_wheel
# tox
# pytest Tensorflow prevents any
- name: Test Install
run: |
cd python/EggNetExtension
python setup.py build_ext
python setup.py build_ext --build-lib ./EggNetExtension
python setup.py bdist_wheel
# Install from setup.py
pip install .
# ToDo: Check install from
- name: Upload Python Package
uses: actions/upload-artifact@v1
with:
name: EggNetExtension-${{ matrix.os }}-${{ matrix.python-version }}
path: python/EggNetExtension/dist
|
udma_filter:
files: [
rtl/udma_filter_au.sv,
rtl/udma_filter_bincu.sv,
rtl/udma_filter_rx_dataout.sv,
rtl/udma_filter_tx_datafetch.sv,
rtl/udma_filter_reg_if.sv,
rtl/udma_filter.sv,
]
|
#
# List of IPs and relative branch/commit-hash/tag.
# Uses the YAML syntax.
#
# Examples:
#
# or10n:
# commit: tags/PULP3_final
# domain: [cluster]
# udma:
# commit: 62b10440
# domain: [soc]
# axi_slice:
# commit: master
# domain: [soc,cluster]
# If a *tag* or *commit* is referenced, the IP will be in a
# state of DETACHED HEAD. Before committing any additional
# work, make sure to checkout a branch.
#
# APB IPs
apb/apb_node:
commit: 58e26e5ab10a366a1ac301b5bbfc07fe37f7fcc5
apb/apb_event_unit:
commit: af<PASSWORD>bd<PASSWORD>aa<PASSWORD>
apb/apb_fll_if:
commit: <PASSWORD>
apb/apb_gpio:
commit: 049532637f6d936140d7178af3656be427713c4e
apb/apb_i2c:
commit: 84855413cc2c8e70209ee7f168a0225d3c5914a1
apb/apb_pulpino:
commit: 5bb6bf07ebb2afb9fe14d047cc06eec1b2173551
apb/apb_spi_master:
commit: e631067b90a99ee794cf83fb43e70b574f172b2d
apb/apb_timer:
commit: e48c4284ca8fb4dc65ad681d1b882f48e5f0582a
apb/apb_uart:
commit: 3bc0006c070b0b9c18719586cab2845cd6e8b63c
apb/apb2per:
commit: 3d674fdae8ac7998892c9cb549d31075c463de84
# AXI IPs
axi/axi2apb:
commit: b4c915fcd9526ab6e7378a3e43555189c898b89a
axi/axi_mem_if_DP:
commit: 7054842d5b888794536339fef07f474b792e84f0
axi/axi_node:
commit: 7987b2da409f9fd293032d4bfee63492cd1929a7
axi/axi_slice:
commit: f31793494e1832a2ed73dad5e6c87575fca0ee7f
axi/axi_slice_dc:
commit: 53e61a33cb6f80ed8fd119e8add9bdd19ed1fcdd
axi/axi_spi_master:
commit: 273d3dc58b5696bfaf475501bf1024cfba1c8a7a
axi/axi_spi_slave:
commit: <PASSWORD>
axi/core2axi:
commit: <PASSWORD>1c259ed4b4c5bbfd12
adv_dbg_if:
commit: ec98d1af68677e5bcfc99b93fa575d45dcc4c42c
riscv:
commit: <PASSWORD>852ec<PASSWORD>95ef5da78f3<PASSWORD>70fb<PASSWORD>
alternatives: [or10n,riscv]
#or10n:
# commit: master
# alternatives: [or10n,riscv]
|
# Run the RISC-V compliance framework port to check the current NEORV32 version
name: 'RISC-V Compliance'
on:
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
name: '🐧 Ubuntu-Latest'
steps:
- name: '🧰 Repository Checkout'
uses: actions/checkout@v2
- name: '🔧 Setup Environment Variables'
run: |
echo "$GITHUB_WORKSPACE/riscv/bin" >> $GITHUB_PATH
echo $GITHUB_WORKSPACE
- name: '🔧 Setup RISC-V GCC'
run: |
/bin/bash -c "chmod u+x ./.ci/install.sh && ./.ci/install.sh"
echo $GITHUB_WORKSPACE
- name: '🔧 Setup GHDL Simulator'
uses: ghdl/setup-ghdl-ci@nightly
with:
backend: gcc
- name: '⚙️ Run RISC-V Compliance Tests'
run: /bin/bash -c "chmod u+x ./riscv-compliance/run_compliance_test.sh && ./riscv-compliance/run_compliance_test.sh"
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: yolov2 detection on voc dataset.
input size: 448*448
float ops: 9.86G
task: detection
framework: darknet
prune: '0.71'
version: 1.4
files:
- name: dk_yolov2_voc_448_448_0.71_9.86G_1.4
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=dk_yolov2_voc_448_448_0.71_9.86G_1.4.zip
checksum: e58e249b4c8d811f92990f8d7554077a
- name: yolov2_voc_pruned_0_71
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov2_voc_pruned_0_71-zcu102_zcu104_kv260-r1.4.0.tar.gz
checksum: 463b48ff5bf497f99d9c38035d80fb89
- name: yolov2_voc_pruned_0_71
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov2_voc_pruned_0_71-vck190-r1.4.0.tar.gz
checksum: 4a44a33fb7ef45fb09e928d355b02409
- name: yolov2_voc_pruned_0_71
type: xmodel
board: vck5000
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov2_voc_pruned_0_71-vck5000-DPUCVDX8H-r1.4.0.tar.gz
checksum: db5b869e1208670de8fe85b80879499b
- name: yolov2_voc_pruned_0_71
type: xmodel
board: u50-DPUCAHX8H & u50lv-DPUCAHX8H & u280-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov2_voc_pruned_0_71-u50-u50lv-u280-DPUCAHX8H-r1.4.0.tar.gz
checksum: 3213c652f4ea754ac1c8cf85864c0262
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
name: "wmt_ende_best"
data:
src: "en"
trg: "de"
train: "test/data/en-de/train.tok.bpe"
dev: "test/data/en-de/dev.tok.bpe"
test: "test/data/en-de/test.tok.bpe"
level: "bpe"
lowercase: False
max_sent_length: 100
src_voc_min_freq: 0
src_voc_limit: 100000
trg_voc_min_freq: 0
trg_voc_limit: 100000
src_vocab: "test/data/en-de/vocab.txt"
trg_vocab: "test/data/en-de/vocab.txt"
testing:
beam_size: 5
alpha: 1.0
training:
random_seed: 42
optimizer: "adam"
learning_rate: 0.0002
learning_rate_min: 0.0000005
weight_decay: 0.0
clip_grad_norm: 1.0
batch_size: 4096
batch_type: "token"
scheduling: "plateau"
patience: 4
decrease_factor: 0.7
early_stopping_metric: "ppl"
epochs: 20
validation_freq: 8000
logging_freq: 1000
eval_metric: "bleu"
model_dir: "wmt_ende_best"
overwrite: False
shuffle: True
use_cuda: True
max_output_length: 100
print_valid_sents: [0, 1, 2]
model:
tied_embeddings: True
encoder:
rnn_type: "lstm"
embeddings:
embedding_dim: 512
scale: False
hidden_size: 1024
bidirectional: True
dropout: 0.2
num_layers: 4
decoder:
rnn_type: "lstm"
embeddings:
embedding_dim: 512
scale: False
emb_scale: False
hidden_size: 1024
dropout: 0.2
hidden_dropout: 0.2
num_layers: 4
input_feeding: True
init_hidden: "bridge"
attention: "bahdanau"
|
package:
name: tech_cells_generic
description: "Technology-agnostic building blocks."
dependencies:
common_verification: { git: "https://github.com/pulp-platform/common_verification.git", version: 0.1.1 }
sources:
- target: any(all(any(all(not(asic), not(fpga)), tech_cells_generic_include_tc_sram), not(tech_cells_generic_exclude_tc_sram)), verilator)
files:
# Level 0
- src/rtl/tc_sram.sv
- target: all(any(all(not(asic), not(fpga)), tech_cells_generic_include_tc_clk), not(tech_cells_generic_exclude_tc_clk))
files:
# Level 0
- src/rtl/tc_clk.sv
- target: all(any(fpga, tech_cells_generic_include_xilinx_xpm), not(tech_cells_generic_exclude_xilinx_xpm))
files:
- src/fpga/pad_functional_xilinx.sv
- src/fpga/tc_clk_xilinx.sv
- src/fpga/tc_sram_xilinx.sv
- target: all(any(not(synthesis), tech_cells_generic_include_pwr_cells), not(tech_cells_generic_exclude_pwr_cells))
files:
- src/tc_pwr.sv
- target: all(any(test, tech_cells_generic_include_tb_cells), not(tech_cells_generic_exclude_tb_cells))
files:
- test/tb_tc_sram.sv
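  # A worked reading of the first target expression above, assuming Bender's
  # usual semantics (a target expression is a boolean function of the enabled
  # target flags):
  # - no flags set: not(asic) and not(fpga) hold and the exclude flag is
  #   unset, so src/rtl/tc_sram.sv is compiled;
  # - only `fpga` set: the inner all() fails and the include flag is unset,
  #   so tc_sram.sv is skipped (the fpga block supplies tc_sram_xilinx.sv);
  # - `verilator` set: the outer any() is satisfied outright, so the
  #   behavioural tc_sram.sv is always compiled for Verilator runs.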
|
<reponame>teratide/tydi-json<filename>.github/workflows/test.yml
name: Test
on:
push:
branches:
- master
pull_request:
jobs:
vhdl:
name: VHDL
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: true
- uses: ghdl/setup-ghdl-ci@master
with:
backend: llvm
- uses: actions/setup-python@v2
with:
python-version: '3.x'
- name: Install dependencies
run: |
python -m pip install --upgrade pip setuptools wheel
python -m pip install vhdeps
- name: vhdeps
run: vhdeps -i vhlib -i component -i test ghdl -- --pattern '*_tc' --pattern ':!*/vhlib/*'
|
register_interface:
vlog_opts:
- -L common_cells_lib
- -L axi_lib
incdirs:
- include
- ../pulp_platform_axi/include
- ../pulp_platform_common_cells/include
files:
# Level 0
- src/reg_intf.sv
# Level 1
- src/apb_to_reg.sv
- src/axi_to_reg.sv
- src/periph_to_reg.sv
- src/reg_cdc.sv
- src/reg_demux.sv
- src/reg_mux.sv
- src/reg_to_mem.sv
- src/reg_uniform.sv
# Level 2
- src/axi_lite_to_reg.sv
reggen_primitives:
vlog_opts:
- -L common_cells_lib
- -L axi_lib
incdirs:
- include
- ../pulp_platform_axi/include
- ../pulp_platform_common_cells/include
files:
- vendor/lowrisc_opentitan/src/prim_subreg.sv
- vendor/lowrisc_opentitan/src/prim_subreg_arb.sv
- vendor/lowrisc_opentitan/src/prim_subreg_ext.sv
- vendor/lowrisc_opentitan/src/prim_subreg_shadow.sv
register_interface_test:
vlog_opts:
- -L common_cells_lib
- -L axi_lib
incdirs:
- include
files:
- src/reg_test.sv
flags:
- skip_synthesis
|
<filename>Bender.yml
package:
name: apb_fll_if
dependencies:
apb: { git: "https://github.com/pulp-platform/apb.git", version: 0.2.1 }
## fll_behav: { git: "<EMAIL>:alsaqr-synthesis/fll_behav.git", version: 0.0.0 }
sources:
- src/fll_intf.sv
- src/apb_to_fll.sv
- target: test
files:
- test/apb_fll_tb.sv
|
module_name: oscillator
description: Interface for a generic oscillator. Note that there is an optional input, actl, a control input that adjusts the output frequency of the oscillator. If this input exists, in either the current or the voltage domain, the test and the generated model will check Kvco, gain compression, etc. If it does not exist, the test and the generated model simply calculate how the output frequency changes with the inputs.
pin:
vdd:
name: vdd
description: power supply
direction: input
datatype: pwl
vss:
name: vss
description: ground
direction: input
datatype: pwl
is_optional: True
actl:
name: actl
description: (optional) analog control of the oscillation frequency (Kvco will be extracted wrt this input). You can add a current constraint for a CCO
direction: input
datatype: pwl
is_optional: True
outp:
name: outp
description: positive output clock
direction: output
datatype: logic
outn:
name: outn
description: (optional) negative output clock
direction: output
datatype: logic
is_optional: True
metric: # optional behaviors to be incorporated in a model
compression:
description: Incorporate gain compression behavior
modelparam:
jitter:
description: jitter in normalized phase ( [0.0,1.0) )
datatype: real
value: 0.0
etol_ph:
description: resolution of normalized phase ( [0.0,1.0) )
datatype: real
value: 0.01
etol_v:
description: voltage resolution
datatype: real
value: 0.005
etol_i:
description: current resolution
datatype: real
value: 1e-6
testparam:
vlog_timeunit:
description: verilog timeunit
value: 1ps
temperature:
description: temperature condition
value: 25.0
tol_freq:
description: tolerance of frequency
value: 100e6
est_freq:
description: representative output frequency used to calculate the ramp slope of a control voltage (current), i.e. actl. Meaningful only when the actl input exists
value: 100e6
test_max_sample:
description: estimated maximum number of test vectors for test1
value: 20
test_sensitivity:
description: sensitivity threshold for test1 in percent
value: 0.0
actl_min:
description: min value of actl to sweep. Valid only if the actl input exists
value: 0.0
actl_max:
description: max value of actl to sweep. Valid only if the actl input exists
value: 0.8
|
<reponame>honorpeter/fletcher
image: docker:latest
services:
- docker:dind
stages:
- check
- test
- examples
.env: &env
GHDL_IMAGE: ghdl/ghdl
GHDL_TAG: ubuntu18-llvm-5.0
variables:
<<: *env
.ghdl-check-job: &ghdl-check-job
stage: check
image: $GHDL_IMAGE:$GHDL_TAG
script:
- find hardware/vhdl -name "*.vhd" |
xargs ghdl -i -v --std=${STD:-08} |
grep entity |
sed -e 's/entity //' |
sed -e 's/ \*\*//' |
xargs -L 1 ghdl -m --std=${STD:-08} -frelaxed-rules --ieee=synopsys
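  # Step-by-step, the pipeline above: `ghdl -i -v` imports every VHDL file and
  # lists the design units it found; `grep entity` keeps only the entity
  # lines; the two `sed` calls strip the leading "entity " and the trailing
  # " **" decoration, leaving bare entity names; `xargs -L 1 ghdl -m` then
  # elaborates each entity individually, so any analysis or elaboration error
  # fails the check job.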
check-vhdl-93c:
<<: *ghdl-check-job
variables:
<<: *env
STD: 93c
check-vhdl-08:
<<: *ghdl-check-job
test-vhdl-08:
<<: *ghdl-check-job
stage: test
allow_failure: true
script:
- find hardware -name "*.vhd" |
xargs ghdl -i -v --std=${STD:-08} |
grep entity |
grep _tb |
sed -e 's/entity //' |
sed -e 's/ \*\*//' |
xargs -i -t bash -c '
ghdl -m --std=${STD:-08} -frelaxed-rules --ieee=synopsys {};
ghdl -r --std=${STD:-08} --ieee=synopsys {} --stop-time=100ns'
examples-fletchgen-stringread:
image: docker:latest
stage: examples
script:
- docker build -t fletchgen .
- docker run -v `pwd`/hardware/test/fletchgen/stringread:/src -v `pwd`/hardware:/hardware -e "FLETCHER_HARDWARE_DIR=/hardware" fletchgen -i src/test.fbs -o src/test_wrapper.vhd -n test -w test_wrapper -s src/test.fbs -d src/test.rb --sim src/sim_top.vhd -x src/test.srec
- sed -i -e 's/"src\/test.srec"/"src\/test\/fletchgen\/stringread\/test.srec"/' hardware/test/fletchgen/stringread/sim_top.vhd
- docker run -v `pwd`/hardware:/src $GHDL_IMAGE:$GHDL_TAG bash -c "shopt -s globstar && ghdl -i /src/**/*.vhd && ghdl -m --ieee=synopsys sim_top && ghdl -r -v --ieee=synopsys sim_top --stop-time=1ms"
|
<reponame>siracusa-soc/hwpe-ctrl<gh_stars>1-10
package:
name: hwpe-ctrl
authors:
- "<NAME> <<EMAIL>>"
dependencies:
tech_cells_generic: { git: "https://github.com/pulp-platform/tech_cells_generic.git", version: 0.2.2 }
sources:
- include_dirs:
- rtl
files:
# Source files grouped in levels. Files in level 0 have no dependencies on files in this
# package. Files in level 1 only depend on files in level 0, files in level 2 on files in
# levels 1 and 0, etc. Files within a level are ordered alphabetically.
# Level 0
- rtl/hwpe_ctrl_interfaces.sv
- rtl/hwpe_ctrl_package.sv
# Level 1
- rtl/hwpe_ctrl_regfile_latch.sv
- rtl/hwpe_ctrl_seq_mult.sv
- rtl/hwpe_ctrl_uloop.sv
# Level 2
- rtl/hwpe_ctrl_regfile_latch_test_wrap.sv
# Level 3
- rtl/hwpe_ctrl_regfile.sv
# Level 4
- rtl/hwpe_ctrl_slave.sv
|
<filename>public/cloc-1.82/tests/outputs/issues/114/T1.yaml
---
# github.com/AlDanial/cloc
header :
cloc_url : github.com/AlDanial/cloc
cloc_version : 1.70
elapsed_seconds : 0.0196640491485596
n_files : 3
n_lines : 3
files_per_second : 152.56267807995
lines_per_second : 152.56267807995
report_file : outputs/issues/114/T1.yaml
issues/114/bar/bee/inner_most.js :
blank: 0
comment: 0
code: 1
language: JavaScript
issues/114/bar/under_Bar.js :
blank: 0
comment: 0
code: 1
language: JavaScript
issues/114/foo/under_foo.js :
blank: 0
comment: 0
code: 1
language: JavaScript
SUM:
blank: 0
comment: 0
code: 3
nFiles: 3
|
<filename>.github/workflows/generate-riscv.yml
name: Generate Riscv using VexRiscv, archive
on:
workflow_dispatch:
push:
branches: [ trunk ]
# Publish semver tags as releases.
tags: [ 'v*.*.*' ]
jobs:
generate-riscv:
name: Generate Riscv
runs-on: ubuntu-latest
container:
image: ghcr.io/base-band/docker-images/vex-ci-build:latest
steps:
- uses: actions/checkout@v2
with:
submodules: recursive
- name: check pwd
run: pwd
- name: check ls
run: ls
- name: make genriscv
run: make genriscv
- name: Archive
uses: actions/upload-artifact@v2
with:
name: xbb-hdl
path: hdl/riscv/hdl/generated/
|
steps:
- command: |
# tool setup
source /cad/modules/tcl/init/bash
module load base vcs xcelium/19.03.003 hspice spectre/18.10.314 verdi
# create virtual environment
python3.7 -m venv venv
source venv/bin/activate
# install python dependencies for testing
pip install "pytest<6"
pip install coverage pytest-pycodestyle
pip install --upgrade "mantle>=2.0.0"
pip install vcdvcd decorator kratos
pip install DeCiDa scipy numpy
# install fault
pip install -e .
# install kratos runtime
pip install kratos-runtime
# use the latest cmake
pip install cmake
# run tests
coverage run -m pytest --pycodestyle tests/ -v -r s
# upload coverage results
bash <(curl -s https://codecov.io/bash)
label: "test"
timeout_in_minutes: 60
agents:
fault2: "true"
- command: |
# set up environment
source /etc/environment
echo $$PATH
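      # ($$ is, to my understanding, Buildkite's escape for a literal $, so
      # PATH is expanded by the shell at run time rather than at
      # pipeline-upload time)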
# create virtual environment
python3.7 -m venv venv
source venv/bin/activate
# install python dependencies for testing
pip install wheel
pip install "pytest<6"
pip install pytest-cov pytest-pycodestyle
pip install vcdvcd decorator kratos
pip install --upgrade "mantle>=2.0.0"
pip install DeCiDa scipy numpy
# use the latest cmake
pip install cmake
# install fault
pip install -e .
# run tests
pytest --pycodestyle --cov-report=xml --cov=fault tests/ -v -r s
# upload coverage results
bash <(curl -s https://codecov.io/bash)
# deactivate virtual environment
deactivate
label: "fpga_verif"
timeout_in_minutes: 60
agents:
fpga_verif: "true"
|
package:
name: udma_uart
authors:
- "<NAME> <<EMAIL>>"
- "<NAME> <<EMAIL>>"
- "<NAME> <<EMAIL>>"
- "<NAME> <<EMAIL>>"
- "<NAME> <<EMAIL>>"
dependencies:
udma_core: { git: "<EMAIL>:Alsaqr-platform/udma_core.git", version: 2.0.0 }
common_cells: { git: "<EMAIL>:pulp-platform/common_cells.git", version: 1.13.1 }
sources:
# Source files grouped in levels. Files in level 0 have no dependencies on files in this
# package. Files in level 1 only depend on files in level 0, files in level 2 on files in
# levels 1 and 0, etc. Files within a level are ordered alphabetically.
# Level 0
- rtl/udma_uart_reg_if.sv
- rtl/udma_uart_rx.sv
- rtl/udma_uart_tx.sv
# Level 1
- rtl/udma_uart_top.sv
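  # Example of the level convention: udma_uart_top.sv sits in level 1 because
  # it presumably instantiates the three level-0 modules (reg_if, rx, tx), so
  # compiling the list top-to-bottom resolves all dependencies in one pass.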
|
<filename>hw/vendor/pulp_platform_tech_cells_generic/Bender.yml
package:
name: tech_cells_generic
description: "Technology-agnostic building blocks."
dependencies:
common_verification: { git: "https://github.com/pulp-platform/common_verification.git", version: 0.1.1 }
sources:
- target: any(all(rtl, simulation), verilator)
files:
# level 0
- src/rtl/tc_sram.sv
- target: all(fpga, xilinx)
files:
- src/fpga/tc_clk_xilinx.sv
- src/fpga/tc_sram_xilinx.sv
- target: not(all(fpga, xilinx))
files:
# Level 0
- src/rtl/tc_clk.sv
- target: not(synthesis)
files:
- src/tc_pwr.sv
- target: test
files:
- test/tb_tc_sram.sv
|
apiVersion: grafana.f110.dev/v1alpha1
kind: GrafanaUser
metadata:
name: hoge
namespace: grafana
labels:
instance: prod
spec:
email: <EMAIL>
admin: true
|
# OpTiMSoC requires Ubuntu 16.04
# Travis supports this OS only in a docker environment
# See Dockerfile for the full build instructions
sudo: required
language: cpp
services:
- docker
script:
- docker build .
|
<reponame>pan185/UnarySim<gh_stars>1-10
# This file defines a single architecture set for tlut systolic array performance projection
- proj_16_16_bank8_block16
- proj_32_32_bank8_block16
- proj_64_64_bank8_block16
- proj_128_128_bank8_block16
|
<gh_stars>100-1000
package:
name: axi_node
description: "An efficient AXI4 interconnect"
dependencies:
common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.10.0 }
axi: { git: "https://github.com/pulp-platform/axi.git", rev: 3330f59b8668d8913e9835ba6b58d09536d2f209 }
sources:
- src/apb_regs_top.sv
- src/axi_address_decoder_AR.sv
- src/axi_address_decoder_AW.sv
- src/axi_address_decoder_BR.sv
- src/axi_address_decoder_BW.sv
- src/axi_address_decoder_DW.sv
- src/axi_node_arbiter.sv
- src/axi_AR_allocator.sv
- src/axi_AW_allocator.sv
- src/axi_BR_allocator.sv
- src/axi_BW_allocator.sv
- src/axi_DW_allocator.sv
- src/axi_multiplexer.sv
- src/axi_node.sv
- src/axi_node_intf_wrap.sv
- src/axi_node_wrap_with_slices.sv
- src/axi_regs_top.sv
- src/axi_request_block.sv
- src/axi_response_block.sv
|
<gh_stars>1-10
# Copyright 2022 ETH Zurich
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
clic:
incdirs:
- ../common_cells/include
files:
# - src/gen/clic_reg_pkg.sv
# - src/gen/clic_reg_top.sv
# - src/gen/clic_reg_adapater.sv
- src/clic_reg_pkg.sv
- src/clic_reg_top.sv
- src/clic_reg_adapter.sv
- src/clic_gateway.sv
- src/clic_target.sv
- src/clic.sv
vlog_opts:
- -L register_interface_lib,
|
---
title: Understanding the Instruction Set
author:
- Felix "xq" Queißner
date: April 28, 2020
abstract: |
  The SPU Mark II uses quite a unique instruction set, following a highly orthogonal programming style.
  In contrast to most CPUs on the market, the SPU Mark II is a stack machine. This means that all operations move data to or from the stack. This does not mean that the CPU has no registers: there are auxiliary registers such as the stack pointer <code>SP</code> or the instruction pointer <code>IP</code>.
|
language: c
matrix:
include:
- os: linux
compiler: gcc
cache: apt
dist: xenial
- os: osx
compiler: clang
osx_image: xcode10.2
# workaround for https://travis-ci.community/t/xcode-8-3-homebrew-outdated-error/3798
env:
global:
- SHORT_COMMIT_HASH=`git rev-parse --short HEAD`
- VERSION_STRING=nightly-$SHORT_COMMIT_HASH
- BUILD_NAME="Ubertooth-`date +%Y-%m-%d`-$SHORT_COMMIT_HASH"
- ARTEFACT_BASE=$TRAVIS_BUILD_DIR/artefacts/
- ARTEFACT_PATH=$ARTEFACT_BASE/$BUILD_NAME
addons:
apt:
packages:
- libbluetooth-dev
- libusb-1.0-0-dev
- gcc-arm-none-eabi
- libnewlib-arm-none-eabi
- libstdc++-arm-none-eabi-newlib
homebrew:
taps: PX4/homebrew-px4
packages:
- gcc-arm-none-eabi
before_script:
# build and install latest libbtbb
- git clone https://github.com/greatscottgadgets/libbtbb.git
- mkdir libbtbb/build
- cd libbtbb/build
- cmake ..
- make
- sudo make install
- export CFLAGS="-Wall -Wextra -Werror -Wno-zero-length-array"
script:
# Host code
- mkdir $TRAVIS_BUILD_DIR/host/build
- cd $TRAVIS_BUILD_DIR/host/build
- cmake ..
- make
- sudo make install
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo ldconfig; fi
# Firmware
- cd $TRAVIS_BUILD_DIR/firmware
# Set version string
- sed -e "s/GIT_REVISION=\".*\"/GIT_REVISION=\"$VERSION_STRING\"/" -i".bak" common.mk
# RX only firmware
- mkdir rx_only
- DISABLE_TX=1 make bluetooth_rxtx
- mv bluetooth_rxtx/bluetooth_rxtx.bin rx_only/bluetooth_rx_only.bin
- mv bluetooth_rxtx/bluetooth_rxtx.dfu rx_only/bluetooth_rx_only.dfu
# Regular firmware
- make clean
- make bluetooth_rxtx
# Bootloader firmware
- cd bootloader
- make
after_success:
# Construct archive for deploying to ubertooth-nightlies
- mkdir -p $ARTEFACT_PATH/firmware-bin/
# Put libbtbb in to the archive
- cd $TRAVIS_BUILD_DIR/libbtbb/
- git archive --format=tar --prefix=libbtbb/ HEAD | (cd $ARTEFACT_PATH && tar xf -)
# Export Ubertooth git repo
- cd $TRAVIS_BUILD_DIR/
- git archive --format=tar HEAD | (cd $ARTEFACT_PATH && tar xf -)
# Set version string
- sed -e "s/set(RELEASE.*/set(RELEASE \"$VERSION_STRING\")/" -i".bak" $ARTEFACT_PATH/host/libubertooth/src/CMakeLists.txt
# Copy firmware to firmware-bin directory
- cp $TRAVIS_BUILD_DIR/firmware/rx_only/bluetooth_rx_only.bin $ARTEFACT_PATH/firmware-bin/
- cp $TRAVIS_BUILD_DIR/firmware/rx_only/bluetooth_rx_only.dfu $ARTEFACT_PATH/firmware-bin/
- ls $TRAVIS_BUILD_DIR/firmware/bluetooth_rxtx/
- cp $TRAVIS_BUILD_DIR/firmware/bluetooth_rxtx/bluetooth_rxtx.bin $ARTEFACT_PATH/firmware-bin/
- cp $TRAVIS_BUILD_DIR/firmware/bluetooth_rxtx/bluetooth_rxtx.dfu $ARTEFACT_PATH/firmware-bin/
- cp $TRAVIS_BUILD_DIR/firmware/bootloader/bootloader.bin $ARTEFACT_PATH/firmware-bin/
# Build the archive
- cd $ARTEFACT_BASE
- tar -cJvf $ARTEFACT_BASE/$BUILD_NAME.tar.xz $BUILD_NAME
deploy:
provider: script
skip-cleanup: true
script: bash $TRAVIS_BUILD_DIR/tools/deploy-nightly.sh
on:
branch: master
|
<reponame>bluetiger9/Vitis-AI
name: vitis-ai-optimizer_caffe
channels:
- conda-forge
- defaults
dependencies:
- python=3.7
- vai_optimizer_caffe_gpu
|
<reponame>jvanstraten/vhdmmio
metadata:
name: basic
fields:
- address: 0
name: ctrl
behavior: control
- address: 4
name: stat
behavior: status
- address: 0x1---
name: slv_a
behavior: axi
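  # Assumption about the address notation: in vhdmmio a `-` hex digit is a
  # don't-care, so `0x1---` should match the whole 0x1000-0x1FFF range, and the
  # `axi` behavior forwards accesses in that window to an AXI subordinate port,
  # while ctrl and stat decode the single word addresses 0 and 4.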
|
# This is a sample build configuration for Python.
# Check our guides at https://confluence.atlassian.com/x/x4UWN for more examples.
# Only use spaces to indent your .yml configuration.
# -----
# You can specify a custom docker image from Docker Hub as your build environment.
image: sagemath/sagemath
pipelines:
default:
- step:
script: # Modify the commands below to build your repository.
- export SAGE_ROOT=`sage -c "import os; print os.environ['SAGE_ROOT']"`
- . $SAGE_ROOT/local/bin/sage-env
- umask 000 # allow to run doctests in world writable directory
- pip install -r requirements.txt # install flake8
- flake8 estimator.py
- PYTHONIOENCODING=UTF-8 PYTHONPATH=`pwd` sage-runtests estimator.py
- PYTHONIOENCODING=UTF-8 PYTHONPATH=`pwd` sage-runtests README.rst
|
before_install:
- sudo add-apt-repository ppa:saltmakrell/ppa -y
- sudo apt-get update
- sudo apt-get install -y yosys
language: node_js
node_js:
- "8"
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: face quality model. This model outputs a face quality score, which is
usually used to filter low-quality faces to further improve face recognition accuracy.
input size: 80*60
float ops: 61.68M
task: face quality
framework: pytorch
prune: 'no'
version: 1.4
files:
- name: py_face-quality_80_60_61.68M_1.4
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_face-quality_80_60_61.68M_1.4.zip
checksum: fbbd98e015b5927f7c9426759b4deba7
- name: face-quality_pt
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=face-quality_pt-zcu102_zcu104_kv260-r1.4.0.tar.gz
checksum: 751e3f137e75fe90328eff96137a4393
- name: face-quality_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=face-quality_pt-vck190-r1.4.0.tar.gz
checksum: 7dd34900afdbdff19ddd9232ece7d4b7
- name: face-quality_pt
type: xmodel
board: vck5000
download link: https://www.xilinx.com/bin/public/openDownload?filename=face-quality_pt-vck5000-DPUCVDX8H-r1.4.0.tar.gz
checksum: 7aa34cfb1577692399a2da45d06ba86c
- name: face-quality_pt
type: xmodel
board: u50-DPUCAHX8H & u50lv-DPUCAHX8H & u280-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=face-quality_pt-u50-u50lv-u280-DPUCAHX8H-r1.4.0.tar.gz
checksum: b6ac8056f364e6f212e30041293c87cb
- name: face-quality_pt
type: xmodel
board: u50-DPUCAHX8L & u50lv-DPUCAHX8L & u280-DPUCAHX8L
download link: https://www.xilinx.com/bin/public/openDownload?filename=face-quality_pt-u50-u50lv-u280-DPUCAHX8L-r1.4.0.tar.gz
checksum: 0bfdd5d08c05b59b009cd252072eb710
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
label-alias:
bug: 'Type: Bug'
feature_request: 'Type: Feature Request'
question: 'Type: Question'
|
dist: xenial
language: python
python:
- "3.6"
sudo: required
cache:
directories:
- ${TRAVIS_BUILD_DIR}/pycoreir
- ${TRAVIS_BUILD_DIR}/pysmt
- ${TRAVIS_BUILD_DIR}/coreir
- /home/travis/.smt_solvers/python-bindings-3.6
addons:
apt:
update: true
sources:
- ubuntu-toolchain-r-test
packages:
- g++-7
- clang
- wget
- unzip
- build-essential
- python3
- automake
- libgmp-dev
- curl
- nano
- python3-dev
- libboost-dev
- default-jdk
- libclang-dev
- llvm
- llvm-dev
- lbzip2
- libncurses5-dev
- python3-nose
- iverilog
- libreadline-dev
- libmpfr-dev
- libmpc-dev
install:
- bash ./scripts/travis_install.sh
- pip install -e .
- pysmt-install --msat --confirm-agreement --install-path solvers --bindings-path bindings
- chmod +x ./yosys
env:
- PATH="$PATH:." PYTHONPATH="${TRAVIS_BUILD_DIR}/pycoreir":"${TRAVIS_BUILD_DIR}/solvers/":"${TRAVIS_BUILD_DIR}/bindings:${PYTHONPATH}" LD_LIBRARY_PATH="${TRAVIS_BUILD_DIR}/coreir/lib":"${TRAVIS_BUILD_DIR}/bindings:${TRAVIS_BUILD_DIR}/solvers:${LD_LIBRARY_PATH}" COREIRCONFIG=g++-7 CC=gcc-7 CXX=g++-7
script: nosetests tests -vv
|
language: ruby
bundler_args: --without development
rvm:
- 1.9.3
- 2.0.0
gemfile:
- tests/Gemfile
branches:
only:
- master
- 2.x
script: cd tests; bundle exec rake
|
<gh_stars>100-1000
axi_slice:
files: [
src/axi_single_slice.sv,
src/axi_ar_buffer.sv,
src/axi_aw_buffer.sv,
src/axi_b_buffer.sv,
src/axi_r_buffer.sv,
src/axi_slice.sv,
src/axi_w_buffer.sv,
src/axi_slice_wrap.sv,
]
|
<gh_stars>100-1000
# Copyright 2020 ETH Zurich
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
cv32e40p_regfile_rtl:
targets: [
rtl,
tsmc55,
gf22,
]
incdirs: [
./rtl/include,
]
files: [
./rtl/cv32e40p_register_file_ff.sv,
]
cv32e40p:
incdirs: [
./rtl/include,
../../rtl/includes,
]
files: [
./rtl/include/cv32e40p_apu_core_pkg.sv,
./rtl/include/cv32e40p_fpu_pkg.sv,
./rtl/include/cv32e40p_pkg.sv,
./bhv/include/cv32e40p_tracer_pkg.sv,
./rtl/cv32e40p_alu.sv,
./rtl/cv32e40p_alu_div.sv,
./rtl/cv32e40p_ff_one.sv,
./rtl/cv32e40p_popcnt.sv,
./rtl/cv32e40p_compressed_decoder.sv,
./rtl/cv32e40p_controller.sv,
./rtl/cv32e40p_cs_registers.sv,
./rtl/cv32e40p_decoder.sv,
./rtl/cv32e40p_int_controller.sv,
./rtl/cv32e40p_ex_stage.sv,
./rtl/cv32e40p_hwloop_regs.sv,
./rtl/cv32e40p_id_stage.sv,
./rtl/cv32e40p_if_stage.sv,
./rtl/cv32e40p_load_store_unit.sv,
./rtl/cv32e40p_mult.sv,
./rtl/cv32e40p_prefetch_buffer.sv,
./rtl/cv32e40p_prefetch_controller.sv,
./rtl/cv32e40p_obi_interface.sv,
./rtl/cv32e40p_aligner.sv,
./rtl/cv32e40p_sleep_unit.sv,
./rtl/cv32e40p_core.sv,
./rtl/cv32e40p_apu_disp.sv,
./rtl/cv32e40p_fifo.sv
]
cv32e40p_vip_rtl:
targets: [
rtl,
]
incdirs: [
./rtl/include,
]
files: [
./bhv/cv32e40p_sim_clock_gate.sv,
./bhv/cv32e40p_wrapper.sv,
./bhv/cv32e40p_tracer.sv,
./bhv/cv32e40p_core_log.sv,
./bhv/cv32e40p_apu_tracer.sv,
]
defines: [
CV32E40P_TRACE_EXECUTION,
CV32E40P_APU_TRACE
]
flags: [
skip_synthesis,
]
cv32e40p_regfile_rtl:
targets: [
rtl,
tsmc55,
gf22,
]
incdirs: [
./rtl/include,
]
files: [
./rtl/cv32e40p_register_file_latch.sv,
]
cv32e40p_regfile_verilator:
targets: [
verilator,
]
files: [
./rtl/cv32e40p_register_file_ff.sv,
]
cv32e40p_regfile_fpga:
targets: [
xilinx,
]
incdirs: [
./rtl/include,
]
files: [
./rtl/cv32e40p_register_file_ff.sv,
]
|
<filename>.github/workflows/unittest.yml
# ************************************************************************
# @author: <NAME>
# @copyright: Copyright 2021
# @credits: AKAE
#
# @license: BSDv3
# @maintainer: <NAME>
# @email: <EMAIL>
#
# @file: unittest.yml
# @date: 2021-08-25
#
# @brief: runs test and deploy
#
# ************************************************************************
name: Unittest
on:
push:
paths-ignore:
- 'doc/**'
- 'README.md'
jobs:
test:
runs-on: ubuntu-latest
env:
GHDL_OPTS: "--std=93c --ieee=synopsys --time-resolution=ps --workdir=./sim/work"
steps:
- uses: actions/checkout@v1
- uses: ghdl/setup-ghdl-ci@nightly
with:
backend: mcode
- name: Prepare
run: |
ghdl --version
mkdir -p ./sim/work
- name: eSpiStaticSlave
run: |
ghdl -a ${GHDL_OPTS} ./bfm/eSpiStaticSlave.vhd
ghdl -a ${GHDL_OPTS} ./tb/eSpiStaticSlave_tb.vhd
if [[ -z $(ghdl -r ${GHDL_OPTS} eSpiStaticSlave_tb -gDO_ALL_TEST=True | grep "Test SUCCESSFUL") ]]; then echo "[ FAIL ] eSpiStaticSlave"; exit 1; fi;
- name: eSpiMasterBfm
run: |
ghdl -a ${GHDL_OPTS} ./bfm/eSpiStaticSlave.vhd
ghdl -a ${GHDL_OPTS} ./bfm/eSpiMasterBfm.vhd
ghdl -a ${GHDL_OPTS} ./tb/eSpiMasterBfm_tb.vhd
if [[ -z $(ghdl -r ${GHDL_OPTS} eSpiMasterBfm_tb -gDO_ALL_TEST=True | grep "Test SUCCESSFUL") ]]; then echo "[ FAIL ] eSpiMasterBfm"; exit 1; fi;
|