language: cpp

# run on new infrastructure
sudo: false

cache:
  apt: true
  directories:
    $RISCV

# required packages to install
addons:
  apt:
    sources:
      - ubuntu-toolchain-r-test
    packages:
      - gcc-4.8
      - g++-4.8
      - gperf
      - autoconf
      - automake
      - autotools-dev
      - libmpc-dev
      - libmpfr-dev
      - libgmp-dev
      - gawk
      - build-essential
      - bison
      - flex
      - texinfo
      - python-pexpect
      - libusb-1.0-0-dev
      - device-tree-compiler

env:
  global:
    - RISCV="/home/travis/riscv_install"
    - PATH="/home/travis/riscv_install/bin:$PATH"

branches:
  only:
    - master

before_install:
  - export CXX=g++-4.8 CC=gcc-4.8
  - ci/make-tmp.sh
  - export LIBRARY_PATH=$TRAVIS_BUILD_DIR/tmp/lib
  - export LD_LIBRARY_PATH=$TRAVIS_BUILD_DIR/tmp/lib
  - export C_INCLUDE_PATH=$TRAVIS_BUILD_DIR/tmp/include
  - export CPLUS_INCLUDE_PATH=$TRAVIS_BUILD_DIR/tmp/include
  - export VERILATOR_ROOT=$TRAVIS_BUILD_DIR/tmp/verilator-3.918/

stages:
  - compile
  - test

jobs:
  include:
    - stage: compile
      script:
        - ci/build-riscv-gcc.sh
    - stage: test
      script:
        - ci/install-verilator.sh
        - ci/install-fesvr.sh
        - ci/build-riscv-tests.sh
        - make run-asm-tests-verilator verilator=$TRAVIS_BUILD_DIR/tmp/bin/verilator

# extra time during long builds
install: travis_wait
config:
  regmap:
    read_filler: 3735928559
    address_increment_mode: none
    address_increment_value: 4
    address_alignment_mode: data_width
    address_alignment_value: 4
  lb_bridge:
    type: apb
  docs: {}
  data_width: 32
  address_width: 12
  name: regs
  version: '1.0'
  register_reset: sync_pos
regmap:
  - name: LEN
    description: Length of pulse
    address: 0
    bfields:
      - name: LEN
        description: Length of pulse
        initial: 0
        width: 32
        lsb: 0
        access: rw
        modifiers: []
  - name: CNT
    description: Counter value
    address: 4
    access_strobes: true
    bfields:
      - name: CNT
        description: Counter value
        initial: 0
        width: 16
        lsb: 0
        access: rw
        modifiers:
          - hwu
  - name: START
    description: Start processes
    address: 8
    bfields:
      - name: STA
        description: Start process A
        initial: 0
        width: 1
        lsb: 0
        access: wo
        modifiers:
          - sc
      - name: STB
        description: Start process B
        initial: 0
        width: 1
        lsb: 8
        access: wo
        modifiers:
          - sc
      - name: STC
        description: Start process C
        initial: 0
        width: 1
        lsb: 16
        access: wo
        modifiers:
          - sc
      - name: KEY
        description: Secret key to start process
        initial: 0
        width: 8
        lsb: 24
        access: wo
        modifiers: []
  - name: STAT
    description: Status
    address: 16
    bfields:
      - name: DIR
        description: Current direction
        initial: 0
        width: 1
        lsb: 0
        access: ro
        modifiers: []
      - name: STATE
        description: Current state
        initial: 0
        width: 3
        lsb: 3
        access: ro
        modifiers:
          - hwu
  - name: CTL
    description: Control
    address: 32
    bfields:
      - name: ENA
        description: Enable A
        initial: 0
        width: 1
        lsb: 1
        access: rw
        modifiers: []
      - name: INITB
        description: Initial value for B
        initial: 0
        width: 8
        lsb: 8
        access: rw
        modifiers: []
  - name: FLAG
    description: Flags
    address: 36
    bfields:
      - name: EVA
        description: Event A
        initial: 0
        width: 1
        lsb: 0
        access: rw
        modifiers:
          - hwu
          - w1tc
      - name: EVB
        description: Event B
        initial: 0
        width: 1
        lsb: 2
        access: ro
        modifiers:
          - hwu
          - rtc
  - name: VERSION
    description: Current version
    address: 64
    bfields:
      - name: MINOR
        description: Minor version
        initial: 35
        width: 8
        lsb: 0
        access: ro
        modifiers:
          - const
      - name: MAJOR
        description: Major version
        initial: 2
        width: 8
        lsb: 16
        access: ro
        modifiers:
          - const
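The register map above is fully mechanical: each bitfield is located by its lsb and width inside a 32-bit register. As a quick illustration of how host software could pack fields accordingly, here is a minimal Python sketch; the helper names are hypothetical and not part of any generator output.

def field_mask(lsb: int, width: int) -> int:
    """Bit mask covering `width` bits starting at `lsb`."""
    return ((1 << width) - 1) << lsb

def encode(value: int, lsb: int, width: int) -> int:
    """Place `value` into a register word at the given bitfield position."""
    assert 0 <= value < (1 << width), "value does not fit in the field"
    return (value << lsb) & field_mask(lsb, width)

# START register from the map above: STA at lsb 0 (1 bit), KEY at lsb 24 (8 bits).
start_word = encode(1, lsb=0, width=1) | encode(0xA5, lsb=24, width=8)
print(hex(start_word))  # 0xa5000001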
<gh_stars>0
# @package _global_
do_blink:
  backend: vivado
  figure: fig2
  sub_figure: a
  device: at200
  part: xc7a200tiffg1156-1L
  num_luts: 12800
  bft: bft16
# Run integration test commands
- hosts: test_orchestrator
  tasks:
    - name: send command to start writing events to file
      shell: "{{ test_orchestrator_python_bin }} {{ test_orchestrator_script_dir }}/send-file-writer-start-cmd.py {{ integration_test_kafka_bootstrap_servers }} {{ kafka_to_nexus_job_pool_topic }} {{ kafka_to_nexus_command_topic }} 0.00001 '{{ kafka_to_nexus_data_dir }}/{{ integration_test_nexus_file_name }}'"
      args:
        chdir: "{{ test_orchestrator_script_dir }}"
      async: "{{ integration_test_max_async_timeout }}"
      poll: 0
      register: file_writer_command

- hosts: forwarder
  tasks:
    - name: send command to start forwarding EPICS PVs
      shell: "{{ forwarder_virtualenv_path }}/bin/python {{ forwarder_script_dir }}/integration_test_forwarder_start_cmd.py"

- hosts: pipeline_data_generator
  tasks:
    - name: start efu data generation
      shell: "\
        LD_LIBRARY_PATH=$LD_LIBRARY_PATH:../lib \
        ./{{ integration_test_efu_generator_bin }} \
        -f {{ integration_test_efu_generator_data_file }} \
        -i {{ integration_test_efu_generator_destination_ip }} \
        -t {{ integration_test_efu_generator_throttle }} \
        {{ integration_test_efu_generator_additional_args }}"
      args:
        chdir: "{{ event_formation_unit_base_dir }}/event-formation-unit/bin"
      async: "{{ integration_test_max_async_timeout }}"
      poll: 0
      register: efu_generator

- hosts: test_orchestrator
  tasks:
    - name: change value of EPICS PV
      shell: "\
        {{ epics_base_dir }}/epics/bin/pvput SIMPLE:VALUE1 1; \
        sleep 1; \
        {{ epics_base_dir }}/epics/bin/pvput SIMPLE:VALUE1 2; \
        sleep 1"
cache:
  key: ${CI_JOB_NAME}
  paths:
    - "sbt-cache/.ivy/cache"
    - "sbt-cache/.boot"
    - "sbt-cache/.sbtboot"
    - "sbt-cache/target"

variables:
  GIT_SUBMODULE_STRATEGY: none
  SBT_VERSION: "0.13.9"
  SBT_OPTS: "-Dsbt.global.base=sbt-cache/.sbtboot -Dsbt.boot.directory=sbt-cache/.boot -Dsbt.ivy.home=sbt-cache/.ivy -Dsbt.repository.config=.gitlab-ci/repositories -Dsbt.override.build.repos=true"

before_script:
  - which ssh-agent
  - eval $(ssh-agent -s)
  - cat "./.gitlab-ci/id_rsa" | tr -d '\r' | ssh-add -
  - mkdir -p ~/.ssh
  - chmod 700 ~/.ssh
  - echo $(ssh-keyscan git.tsinghua.edu.cn) >> ~/.ssh/known_hosts
  - git submodule sync --recursive
  - git submodule update --init --recursive

stages:
  - build_chisel
  - build
  - test

chisel:
  stage: build_chisel
  image: chisel
  script:
    - chmod +x ./build_chisel.sh
    - ./build_chisel.sh
  artifacts:
    paths:
      - thinpad_top.srcs/sources_1/new/Router.v

bitstream:
  stage: build
  image: vivado2018:2018.3
  script:
    - env
    - /opt/Xilinx/Vivado/2018.3/bin/vivado -mode tcl -source build.tcl thinpad_top.xpr
    - test -f thinpad_top.runs/impl_1/*.bit
  artifacts:
    paths:
      - thinpad_top.runs/impl_1/*.bit
      - thinpad_top.runs/impl_1/runme.log
      - thinpad_top.runs/synth_1/runme.log
<reponame>DaveMcEwan/svlint
name: Update RULES.md

on: [push, pull_request]

jobs:
  build:
    strategy:
      matrix:
        os: [ubuntu-latest]
        rust: [stable]
    runs-on: ${{ matrix.os }}
    steps:
      - name: Setup Rust
        uses: hecrj/setup-rust-action@v1
        with:
          rust-version: ${{ matrix.rust }}
      - name: Checkout
        uses: actions/checkout@v1
      - name: Run mdgen
        run: cargo run --bin mdgen > RULES.md
      - name: Commit
        uses: EndBug/add-and-commit@v2.1.0
        with:
          author_name: dalance
          author_email: <EMAIL>
          message: "Update RULES.md"
          path: "./"
          pattern: "RULES.md"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
package:
  name: hwpe-stream
  authors:
    - "<NAME> <<EMAIL>>"
    - "<NAME> <<EMAIL>>"

dependencies:
  tech_cells_generic: { git: "https://github.com/pulp-platform/tech_cells_generic.git", version: 0.2.2 }

sources:
  - include_dirs:
      - rtl
    files:
      # Source files grouped in levels. Files in level 0 have no dependencies on files in this
      # package. Files in level 1 only depend on files in level 0, files in level 2 on files in
      # levels 1 and 0, etc. Files within a level are ordered alphabetically.
      # Level 0
      - rtl/hwpe_stream_interfaces.sv
      - rtl/hwpe_stream_package.sv
      # Level 1
      - rtl/basic/hwpe_stream_assign.sv
      - rtl/basic/hwpe_stream_buffer.sv
      - rtl/basic/hwpe_stream_demux_static.sv
      - rtl/basic/hwpe_stream_deserialize.sv
      - rtl/basic/hwpe_stream_fence.sv
      - rtl/basic/hwpe_stream_merge.sv
      - rtl/basic/hwpe_stream_mux_static.sv
      - rtl/basic/hwpe_stream_serialize.sv
      - rtl/basic/hwpe_stream_split.sv
      - rtl/fifo/hwpe_stream_fifo_ctrl.sv
      - rtl/fifo/hwpe_stream_fifo_scm.sv
      - rtl/streamer/hwpe_stream_addressgen.sv
      - rtl/streamer/hwpe_stream_addressgen_v2.sv
      - rtl/streamer/hwpe_stream_addressgen_v3.sv
      - rtl/streamer/hwpe_stream_sink_realign.sv
      - rtl/streamer/hwpe_stream_source_realign.sv
      - rtl/streamer/hwpe_stream_strbgen.sv
      - rtl/streamer/hwpe_stream_streamer_queue.sv
      - rtl/tcdm/hwpe_stream_tcdm_assign.sv
      - rtl/tcdm/hwpe_stream_tcdm_mux.sv
      - rtl/tcdm/hwpe_stream_tcdm_mux_static.sv
      - rtl/tcdm/hwpe_stream_tcdm_reorder.sv
      - rtl/tcdm/hwpe_stream_tcdm_reorder_static.sv
      # Level 2
      - rtl/fifo/hwpe_stream_fifo_earlystall.sv
      - rtl/fifo/hwpe_stream_fifo_earlystall_sidech.sv
      - rtl/fifo/hwpe_stream_fifo_scm_test_wrap.sv
      - rtl/fifo/hwpe_stream_fifo_sidech.sv
      # Level 3
      - rtl/fifo/hwpe_stream_fifo.sv
      - rtl/tcdm/hwpe_stream_tcdm_fifo_load_sidech.sv
      # Level 4
      - rtl/streamer/hwpe_stream_source.sv
      - rtl/tcdm/hwpe_stream_tcdm_fifo.sv
      - rtl/tcdm/hwpe_stream_tcdm_fifo_load.sv
      - rtl/tcdm/hwpe_stream_tcdm_fifo_store.sv
      # Level 5
      - rtl/streamer/hwpe_stream_sink.sv
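The level-grouping comment in the file list above describes a simple topological ordering: a file in level N only depends on files in levels below N. A minimal Python sketch of that levelling rule, using hypothetical file names rather than the real hwpe-stream dependency graph:

from functools import lru_cache

# Hypothetical dependency map: file -> files it depends on within the package.
deps = {
    "pkg.sv": [],
    "interfaces.sv": [],
    "fifo_ctrl.sv": ["pkg.sv"],
    "fifo.sv": ["fifo_ctrl.sv", "interfaces.sv"],
}

@lru_cache(maxsize=None)
def level(f: str) -> int:
    """Level 0 = no in-package dependencies; else 1 + max of dependency levels."""
    return 0 if not deps[f] else 1 + max(level(d) for d in deps[f])

# Emit the compile order: by level, alphabetical within a level.
for f in sorted(deps, key=lambda f: (level(f), f)):
    print(level(f), f)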
configName: default
folderName: ''
format_version: v1.0
frameworkPath: ../../../../MicrochipHarmony
processor: PIC32MX250F128B
projectName: ''
settings:
  PackageFilter: crypto;net;core;bootloader_apps_usb;bootloader;bsp;quick_docs;usb;csp;core_apps_pic32mx;csp_apps_pic32mx;
  FORCE_OPTIMIZATION: 'false'
  GENERATE_BACKUP: 'true'
  CMSISPath: ./dev_packs/arm/CMSIS/5.7.0
  MERGE_STRATEGY: USER_ALL
  DFPPath: ./dev_packs/Microchip/PIC32MX_DFP/1.4.241/atdf/PIC32MX250F128B.atdf
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

description: Multi-task for detection & segmentation on BDD100K & Cityscapes & Waymo.
input size: 320*512
float ops: 13.65G
task: object detection & segmentation
framework: pytorch
prune: 'no'
version: 2.0
files:
  - name: pt_MT-resnet18_mixed_320_512_13.65G_2.0
    type: float & quantized
    board: GPU
    download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_MT-resnet18_mixed_320_512_13.65G_2.0.zip
    checksum: aff6ea0c8194811a82cac7397ab8a470
  - name: MT-resnet18_mixed_pt
    type: xmodel
    board: zcu102 & zcu104 & kv260
    download link: https://www.xilinx.com/bin/public/openDownload?filename=MT-resnet18_mixed_pt-zcu102_zcu104_kv260-r2.0.0.tar.gz
    checksum: b5b8a03be20a2bc6a9c0b7e7c61f40ea
  - name: MT-resnet18_mixed_pt
    type: xmodel
    board: vck190
    download link: https://www.xilinx.com/bin/public/openDownload?filename=MT-resnet18_mixed_pt-vck190-r2.0.0.tar.gz
    checksum: 75a4b58b4bac9abd042534a3bb8e0a64
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
package:
  name: ape_core
  authors:
    - "<NAME> <<EMAIL>>" # current maintainer

dependencies:
  #common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.20.0 }
  #common_verification: { git: "https://github.com/pulp-platform/common_verification.git", version: 0.1.1 }

export_include_dirs:
  - include

sources:
  # Source files grouped in levels. Files in level 0 have no dependencies on files in this
  # package. Files in level 1 only depend on files in level 0, files in level 2 on files in
  # levels 1 and 0, etc. Files within a level are ordered alphabetically.
  # Level 0
  - src/ape_pkg.sv
  # Level 1
  - src/ape_core.sv
  # Level 2
name: Build examples using YoWASP

on:
  schedule:
    - cron: '0 0 * * 0'

jobs:
  examples:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python 3.x
        uses: actions/setup-python@v2
        with:
          python-version: '3.9'
      - name: Display Python version
        run: python -c "import sys; print(sys.version)"
      - name: install packages
        run: |
          python --version
          pip --version
          pip install yowasp-yosys yowasp-nextpnr-gowin
      - name: build examples
        run: |
          cd examples
          YOSYS=yowasp-yosys NEXTPNR=yowasp-nextpnr-gowin make all
      - name: Archive artifact
        uses: actions/upload-artifact@v2
        with:
          name: bitstreams
          path: examples/*.fs
<gh_stars>1-10
# RUN: lld -flavor darwin -arch x86_64 -dead_strip -export_dynamic %s -dylib %p/Inputs/libSystem.yaml -o %t.dylib -print_atoms | FileCheck -check-prefix=CHECK1 %s
# RUN: lld -flavor darwin -arch x86_64 -export_dynamic -dead_strip %s -dylib %p/Inputs/libSystem.yaml -o %t.dylib -print_atoms | FileCheck -check-prefix=CHECK1 %s
# RUN: lld -flavor darwin -arch x86_64 -dead_strip %s -dylib %p/Inputs/libSystem.yaml -o %t2.dylib -print_atoms | FileCheck -check-prefix=CHECK2 %s
#
# Test that -export_dynamic keeps -dead_strip from removing globals.
#

---
defined-atoms:
  - name: def
    scope: global
    dead-strip: never
  - name: dead
    scope: global
shared-library-atoms:
  - name: dyld_stub_binder
    load-name: /usr/lib/libSystem.B.dylib
    type: unknown
...

# CHECK1: name: def
# CHECK1: name: dead
# CHECK2: name: def
# CHECK2-NOT: name: dead
---
input_file : 07_merge_sort_node_single.akd
output_file : ../07_merge_sort_node_single.md
image_url :
  "Fig.1 Merge sort node and 4-way merge sort tree" : "image/07_merge_sort_node_single_1.jpg"
  "Fig.2 Merge sort node inputs and outputs" : "image/07_merge_sort_node_single_2.jpg"
  "Fig.3 Merge sort node state transitions (1)" : "image/07_merge_sort_node_single_3.jpg"
  "Fig.4 Merge sort node state transitions (2)" : "image/07_merge_sort_node_single_4.jpg"
  "Fig.5 Merge sort node state transitions (3)" : "image/07_merge_sort_node_single_5.jpg"
link_list :
  - id   : "Introduction"
    title: "Merge Sorter Written in VHDL (Introduction)"
    url  : "./01_introduction.md"
  - id   : "Word Definition"
    title: "Merge Sorter Written in VHDL (Word Definition)"
    url  : "./02_word_package.md"
  - id   : "Word Comparator"
    title: "Merge Sorter Written in VHDL (Word Comparator)"
    url  : "./03_word_compare.md"
  - id   : "Sorting Network"
    title: "Merge Sorter Written in VHDL (Sorting Network)"
    url  : "./04_sorting_network.md"
  - id   : "Bitonic Merge Sort"
    title: "Merge Sorter Written in VHDL (Bitonic Merge Sort)"
    url  : "./05_bitonic_sorter.md"
  - id   : "Batcher's Odd-Even Merge Sort"
    title: "Merge Sorter Written in VHDL (Batcher's Odd-Even Merge Sort)"
    url  : "./06_oddeven_sorter.md"
  - id   : "Single-Word Merge Sort Node"
    title: "Merge Sorter Written in VHDL (Single-Word Merge Sort Node)"
    url  : "./07_merge_sort_node_single.md"
  - id   : "Multi-Word Merge Sort Node"
    title: "Merge Sorter Written in VHDL (Multi-Word Merge Sort Node)"
    url  : "./08_merge_sort_node_multi.md"
  - id   : "Merge Sort Tree"
    title: "Merge Sorter Written in VHDL (Merge Sort Tree)"
    url  : "./09_merge_sort_tree.md"
  - id   : "Fractional Word Handling"
    title: "Merge Sorter Written in VHDL (Fractional Word Handling)"
    url  : "./10_merge_sort_core_1.md"
  - id   : "Stream Input"
    title: "Merge Sorter Written in VHDL (Stream Input)"
    url  : "./11_merge_sort_core_2.md"
  - id   : "Stream Feedback"
    title: "Merge Sorter Written in VHDL (Stream Feedback)"
    url  : "./12_merge_sort_core_3.md"
  - id   : "ArgSort IP"
    title: "Merge Sorter Written in VHDL (ArgSort IP)"
    url  : "./13_argsort.md"
  - id   : "ArgSort-Ultra96"
    title: "Merge Sorter Written in VHDL (ArgSort-Ultra96)"
    url  : "https://github.com/ikwzm/ArgSort-Ultra96/blob/1.2.1/doc/ja/argsort-ultra96.md"
  - id   : "ArgSort-Kv260"
    title: "Merge Sorter Written in VHDL (ArgSort-Kv260)"
    url  : "https://github.com/ikwzm/ArgSort-Kv260/blob/1.2.1/doc/ja/argsort-Kv260.md"
  - id   : "Dramatically Accelerating Basic Sorting Algorithms with an FPGA (1)"
    title: "Dramatically Accelerating Basic Sorting Algorithms with an FPGA (1)"
    url  : "https://www.acri.c.titech.ac.jp/wordpress/archives/132"
  - id   : "VALID-then-READY"
    title: "Pitfalls of VALID/READY signal handshaking"
    url  : "https://qiita.com/ikwzm/items/9736b5547cb15309af5c"
---
cryoAsicGen1:
  enable: True
  ForceWrite: False
  EpixHRGen1Cryo:
    enable: True
    Clock Jitter Cleaner:
      enable: True
      RstL: True
      Dec: False
      Inc: False
      Frqtbl: False
      FrqtblZ: True
      Rate: 0x0
      RateZ: 0x0
      BwSel: 0x0
      BwSelZ: 0x2
      FreqSel: 0x2
      FreqSelZ: 0x8
      Sfout: 0x2
      SfoutZ: 0x1
<reponame>mfkiwl/snitch
# Copyright 2020 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51

package:
  name: snitch_ssr
  authors:
    - <NAME> <<EMAIL>>
    - <NAME> <<EMAIL>>
    - <NAME> <<EMAIL>>

dependencies:
  common_cells: {path: ../../vendor/pulp_platform_common_cells}
  register_interface: {path: ../../vendor/pulp_platform_register_interface}
  # Local dependencies.
  tcdm_interface: {path: ../../ip/tcdm_interface}

export_include_dirs:
  - include

sources:
  # Level 0:
  - src/snitch_ssr_pkg.sv
  - src/snitch_ssr_switch.sv
  - src/snitch_ssr_credit_counter.sv
  # Level 1:
  - src/snitch_ssr_indirector.sv
  - src/snitch_ssr_intersector.sv
  # Level 2:
  - src/snitch_ssr_addr_gen.sv
  # Level 3:
  - src/snitch_ssr.sv
  # Level 4:
  - src/snitch_ssr_streamer.sv

  - target: test
    files:
      # Level 0
      - test/fixture_ssr.sv
      - test/fixture_ssr_streamer.sv
      # Level 1
      - test/tb_simple_ssr.sv
      - test/tb_simple_ssr_streamer.sv
<filename>.travis.yml
language: python

install: pip install tox

script: tox -e $BUILD_NAME

stages:
  - test
  - deploy

matrix:
  include:
    - &docker
      env:
        - BUILD_NAME=py27-acceptance-ghdl
        - DOCKER_IMAGE=mcode-2
      services: docker
      language: minimal
      install: skip
      script: docker run --rm -tv $(pwd):/src -w /src vunit/dev:$DOCKER_IMAGE tox -e $BUILD_NAME

    - <<: *docker
      env:
        - BUILD_NAME=py38-acceptance-ghdl
        - DOCKER_IMAGE=llvm

    - <<: *docker
      env:
        - BUILD_NAME=py38-vcomponents-ghdl
        - DOCKER_IMAGE=mcode

    - env: BUILD_NAME=py38-fmt
      python: '3.8'
      script: tox -e $BUILD_NAME -- --check

    - env: BUILD_NAME=py38-lint
      dist: xenial
      python: '3.8'

    - env: BUILD_NAME=py27-unit
      python: '2.7'

    - env: BUILD_NAME=py35-unit
      python: '3.5'

    - env: BUILD_NAME=py38-unit
      dist: xenial
      python: '3.8'

    - env: BUILD_NAME=py27-docs
      python: '2.7'
      before_script: git fetch --unshallow --tags

    - env: BUILD_NAME=py38-docs
      python: '3.8'
      before_script: git fetch --unshallow --tags
      after_success: touch .tox/${BUILD_NAME}/tmp/docsbuild/.nojekyll
      deploy:
        provider: pages
        repo: VUnit/VUnit.github.io
        target_branch: master
        local_dir: .tox/${BUILD_NAME}/tmp/docsbuild/
        # This environment variable is set to an OAuth token in travis vunit settings
        github_token: $GITHUB_PAGES_TOKEN
        skip_cleanup: true
        on:
          repo: VUnit/vunit
          branch: master

    # Deploy to PyPI whenever the package version has changed
    # When a package version has not changed a new upload will not be triggered
    - stage: deploy
      python: '3.8'
      if: tag IS present
      script:
        - git fetch --unshallow --tags
        - python tools/release.py validate
      deploy:
        provider: pypi
        distributions: sdist
        skip_cleanup: true
        skip_upload_docs: true
        user: $PYPI_USER
        password: <PASSWORD>
        on:
          repo: VUnit/vunit
          all_branches: true
apiVersion: grafana.f110.dev/v1alpha1
kind: Grafana
metadata:
  name: grafana
  namespace: grafana
spec:
  adminUser: admin
  adminPasswordSecret:
    name: <PASSWORD>-password
    key: password
  service:
    name: grafana
  userSelector:
    matchLabels:
      instance: prod
blank_issues_enabled: false
contact_links:
  - name: BlackParrot Google Group
    url: https://groups.google.com/g/black-parrot
    about: For more general BlackParrot inquiries, e.g. "I'm trying to do XYZ, what are first steps?"
<reponame>pheonixo/file_info
root: true
parser: '@typescript-eslint/parser'
env:
  node: true
plugins:
  - '@typescript-eslint'
extends:
  - eslint:recommended
  - plugin:@typescript-eslint/recommended
  - prettier
rules:
  array-callback-return: error
  camelcase: error
  consistent-return: 0
  eqeqeq: [error, always, 'null': ignore]
  no-constant-condition: [error, checkLoops: false]
  no-control-regex: 0
  no-fallthrough: [error, commentPattern: fallthrough]
  no-implicit-globals: error
  no-template-curly-in-string: warn
  no-var: error
  prefer-const: [warn, destructuring: all]
  '@typescript-eslint/ban-ts-comment': off
  '@typescript-eslint/explicit-module-boundary-types': off
  '@typescript-eslint/no-explicit-any': off
  '@typescript-eslint/no-namespace': off
  '@typescript-eslint/no-unused-vars': [warn, argsIgnorePattern: '^_']
overrides:
  - files: [tests/**]
    env:
      jest: true
    rules:
      camelcase: 0
version: '3'
services:
  vice:
    build:
      context: .
    working_dir: /home/pwn
    container_name: vice
    ports:
      - "992:80"
    command: "sleep infinity"
<filename>.gitlab-ci.yml<gh_stars>10-100
stages:
  - build
  - test
  - synthesis
  - program

variables:
  XILINX_VIVADO_VER: "2020.2"
  XILINX_VIVADO: /non-free/Xilinx/Vivado/$XILINX_VIVADO_VER
  CI_REGISTRY: mohs.dhcp.lbl.gov
  CONTAINER_IMAGE: $CI_REGISTRY/testing_base
  CONTAINER_IM_IMAGE: $CI_REGISTRY/riscv_bloat
  DOCKER_HOST: tcp://docker:2375/
  DOCKER_DRIVER: overlay2

image: $CONTAINER_IMAGE:$CI_COMMIT_REF_NAME

services:
  - name: mohs.dhcp.lbl.gov/docker:20.10.11-dind
    command: ["--insecure-registry", "mohs.dhcp.lbl.gov"]
    alias: docker

include:
  - local: .gitlab/ci/build.gitlab-ci.yml
  - local: .gitlab/ci/serial_io.gitlab-ci.yml
  - local: .gitlab/ci/soc.gitlab-ci.yml
  - local: .gitlab/ci/oscope.gitlab-ci.yml
  - local: .gitlab/ci/cmoc.gitlab-ci.yml
  - local: .gitlab/ci/comms_top.gitlab-ci.yml
  - local: .gitlab/ci/dsp.gitlab-ci.yml
  - local: .gitlab/ci/marble_family.gitlab-ci.yml

leep_test:
  script:
    - cd projects/common && python3 -m unittest -v

cordic_test:
  before_script:
    - cd cordic && make clean
  stage: test
  script:
    - make all

freq_demo:
  stage: test
  script:
    - cd homeless/freq_demo && make

make_docs:
  before_script:
    - cd build-tools/make-demo && make clean
  stage: test
  script:
    - make check && make makefile.md && cmp ../makefile.md makefile.md

flake8:
  stage: synthesis
  script:
    - find . -name "*.py" | xargs flake8

badger_test:
  before_script:
    - cd badger/tests && make clean && sh ./tap-setup.sh
  stage: test
  script:
    - make && bash tftp_test.sh && bash speed_check.sh

peripheral_test:
  before_script:
    - cd peripheral_drivers
  stage: test
  script:
    - make
    - cd idelay_scanner && make ; cd -
    - cd ds1822 && make ; cd -
    - cd i2cbridge && make

badger_ac701:
  stage: synthesis
  before_script:
    - cd badger/tests && ls /non-free
  script:
    - PATH=$XILINX_VIVADO/bin:$PATH make ac701_rgmii_vtest.bit
  artifacts:
    name: "$CI_JOB_NAME-$CI_COMMIT_REF_NAME"
    expire_in: 2 days
    paths:
      - badger/tests/ac701_rgmii_vtest.bit

badger_ac701_run:
  stage: program
  tags:
    - deploy
  dependencies:
    - badger_ac701
  script:
    - cd badger/tests && test -r ac701_rgmii_vtest.bit && sh teststand_ac701.sh
<reponame>BearerPipelineTest/google-ctf
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

apiVersion: kctf.dev/v1
kind: Challenge
metadata:
  name: memsafety
spec:
  deployed: true
  powDifficultySeconds: 0
  network:
    public: true
  healthcheck:
    # TIP: disable the healthcheck during development
    enabled: true
# @package _global_
defaults:
  - override /do_blink/fig1a@do_blink.fig.fig1a: ["7010", "7020", "at200"]
  - override /do_blink/fig2a@do_blink.fig.fig2a: ["1600", "3200", "6400", "12800"]
  - override /do_blink/spam_filter@do_blink.fig.spam_filter: []
  - override /do_blink/digit_recognition@do_blink.fig.digit_recognition: []
  - override /do_blink/rendering@do_blink.fig.rendering: []
  - override /hydra/launcher: submitit_slurm

do_blink:
  vpr_options:
    bb_factor: 20
    acc_fac: 0.7
    astar_fac: 1.8
    initial_pres_fac: 2.828
    pres_fac_mult: 1.2
    max_criticality: 0.999
    target_ext_pin_util.input: 1.0
    target_ext_pin_util.output: 1.0
    place_algorithm: criticality_timing

hydra:
  launcher:
    cpus_per_task: 8
    mem_per_cpu: 7500mb
    nodes: 1
    timeout_min: 120
<gh_stars>1-10
# Check NEORV32 software framework and test processor

name: Processor

on:
  push:
    branches:
      - master
    paths:
      - 'rtl/**'
      - 'sw/**'
      - 'sim/**'
  pull_request:
    branches:
      - master
    paths:
      - 'rtl/**'
      - 'sw/**'
      - 'sim/**'
  workflow_dispatch:

jobs:
  Processor:
    runs-on: ubuntu-latest
    name: '🐧 Ubuntu | Shell script'
    steps:
      - name: '🧰 Repository Checkout'
        uses: actions/checkout@v2
      - name: '🔧 Setup Environment Variables'
        run: |
          echo "$GITHUB_WORKSPACE/riscv/bin" >> $GITHUB_PATH
          echo $GITHUB_WORKSPACE
      - name: '⚙️ Setup RISC-V GCC'
        run: |
          mkdir riscv
          curl -fsSL https://github.com/stnolting/riscv-gcc-prebuilt/releases/download/rv32i-2.0.0/riscv32-unknown-elf.gcc-10.2.0.rv32i.ilp32.newlib.tar.gz | \
          tar -xzf - -C riscv
          ls -al riscv
      - name: '⚙️ Setup GHDL Simulator'
        uses: ghdl/setup-ghdl-ci@nightly
        with:
          backend: llvm
      - name: '🚧 Run Software Framework Tests'
        run: ./sw/example/processor_check/check.sh
      - name: '🚧 Run Processor Hardware Tests with shell script'
        run: ./sim/ghdl_sim.sh

  VUnit-Container:
    runs-on: ubuntu-latest
    name: '🛳️ Container | VUnit'
    steps:
      - name: '🧰 Repository Checkout'
        uses: actions/checkout@v2
      - name: '🔧 Setup Environment Variables'
        run: |
          echo "$GITHUB_WORKSPACE/riscv/bin" >> $GITHUB_PATH
          echo $GITHUB_WORKSPACE
      - name: '⚙️ Setup RISC-V GCC'
        run: |
          mkdir riscv
          curl -fsSL https://github.com/stnolting/riscv-gcc-prebuilt/releases/download/rv32i-2.0.0/riscv32-unknown-elf.gcc-10.2.0.rv32i.ilp32.newlib.tar.gz | \
          tar -xzf - -C riscv
          ls -al riscv
      - name: '⚙️ Build and install Processor Check software'
        run: |
          make -C sw/example/processor_check \
          clean_all \
          USER_FLAGS+=-DRUN_CHECK \
          USER_FLAGS+=-DUART0_SIM_MODE \
          MARCH=-march=rv32imac \
          info \
          all
      - name: '📤 Archive Processor Check application image'
        uses: actions/upload-artifact@v2
        with:
          name: application
          path: rtl/core/neorv32_application_image.vhd
      - name: '🚧 Run Processor Hardware Tests with VUnit'
        uses: VUnit/vunit_action@master
        with:
          cmd: ./sim/run.py --ci-mode -v
# Adapted from Garnet and ButterPHY

name: input_divider

commands:
  - |
    mkdir -p outputs
    tar -xvf /home/sjkim85/dragonphy_tarballs/input_divider-latest.tar.gz -C outputs
    mv outputs/input_divider-0.1.0/* outputs/

outputs:
  - input_divider.lef
  - input_divider.gds
  - input_divider.spi
  - input_divider.version
<reponame>irichter/ucb-bar_testchipip
# CircleCI Configuration File

# version of circleci
version: 2.1

# default execution env.s
executors:
  main-env:
    docker:
      - image: ucbbar/chipyard-image:1.0.1
    environment:
      JVM_OPTS: -Xmx3200m # Customize the JVM maximum heap limit

# pipeline parameters
parameters:
  # misc parameters
  tools-cache-version:
    type: string
    default: "v5"
  chipyard-cache-version:
    type: string
    default: "v15"

# re-usable commands
commands:
  toolchain-build:
    description: "Build a toolchain"
    parameters:
      tools-version:
        type: string
    steps:
      - checkout
      - run:
          name: Create hash of toolchains
          command: |
            .circleci/create-hash.sh
      - restore_cache:
          keys:
            - << parameters.tools-version >>-installed-<< pipeline.parameters.tools-cache-version >>-{{ checksum "../<< parameters.tools-version >>.hash" }}
      - run:
          name: Building << parameters.tools-version >>
          command: |
            .circleci/build-toolchains.sh << parameters.tools-version >>
          no_output_timeout: 120m
      - save_cache:
          key: << parameters.tools-version >>-installed-<< pipeline.parameters.tools-cache-version >>-{{ checksum "../<< parameters.tools-version >>.hash" }}
          paths:
            - "/home/riscvuser/<< parameters.tools-version >>-install"
  prepare-rtl:
    description: "Run the prepare step of RTL"
    parameters:
      tools-version:
        type: string
        default: "riscv-tools"
      config-key:
        type: string
      timeout:
        type: string
        default: "120m"
    steps:
      - checkout
      - run:
          name: Create hash of toolchains
          command: |
            .circleci/create-hash.sh
      - restore_cache:
          keys:
            - << parameters.tools-version >>-installed-<< pipeline.parameters.tools-cache-version >>-{{ checksum "../<< parameters.tools-version >>.hash" }}
      - restore_cache:
          keys:
            - chipyard-<< pipeline.parameters.chipyard-cache-version >>-{{ checksum "CHIPYARD.hash" }}
      - run:
          name: Building << parameters.config-key >> Config using Verilator
          command: .circleci/do-rtl-build.sh << parameters.config-key >>
          no_output_timeout: << parameters.timeout >>

# set of jobs to run
jobs:
  install-riscv-toolchain:
    executor: main-env
    steps:
      - toolchain-build:
          tools-version: "riscv-tools"
  prepare-build-environment:
    executor: main-env
    steps:
      - checkout
      - restore_cache:
          keys:
            - chipyard-<< pipeline.parameters.chipyard-cache-version >>-{{ checksum "CHIPYARD.hash" }}
      - run:
          name: Setup build environment
          command: |
            .circleci/prepare-for-rtl-build.sh
          no_output_timeout: 120m
      - run:
          name: Install Verilator
          command: |
            .circleci/install-verilator.sh
      - save_cache:
          key: chipyard-<< pipeline.parameters.chipyard-cache-version >>-{{ checksum "CHIPYARD.hash" }}
          paths:
            - "/home/riscvuser/chipyard"
            - "/home/riscvuser/verilator-install"
  run-unittests:
    executor: main-env
    steps:
      - prepare-rtl:
          config-key: "unittest"
      - run:
          name: Run synthesizable unit tests
          command: .circleci/run-tests.sh unittest

# Order and dependencies of jobs to run
workflows:
  version: 2
  build-and-test-unittests:
    jobs:
      # build toolchain
      - install-riscv-toolchain
      # Setup build environment
      - prepare-build-environment
      # Prepare the verilator build and run tests
      - run-unittests:
          requires:
            - install-riscv-toolchain
            - prepare-build-environment
name: Test

on:
  push:
  pull_request:
  workflow_dispatch:

env:
  CI: true
  DOCKER_BUILDKIT: 1

jobs:
  pytest:
    runs-on: ubuntu-latest
    steps:
      - name: '🧰 Checkout'
        uses: actions/checkout@v2
        with:
          submodules: recursive
          fetch-depth: 0
      - name: '🛳️ Build osvb'
        run: docker build -t osvb -f .github/Dockerfile .
      - name: '🚧 Run tests'
        run: docker run --rm -v $(pwd):/src -w /src -e CI osvb pytest -v -s -ra test.py --color=yes

  sigrok:
    runs-on: ubuntu-latest
    steps:
      - name: '🧰 Checkout'
        uses: actions/checkout@v2
      - name: '🚧 Generate waveforms'
        run: |
          cd sigrok/resolution
          for img in buster-mcode buster-llvm-7; do
            docker pull ghdl/ghdl:"$img"
            docker run --rm -tv $(pwd):/src -e CI ghdl/ghdl:"$img" /src/run.sh
          done
          cd ../hierarchy
          docker run --rm -tv $(pwd):/src -e CI ghdl/ghdl:buster-mcode /src/run.sh
      - name: '🛳️ Build docker image with libsigrok and sigrok-cli'
        run: |
          cd sigrok
          docker build -t umarcor/sigrok .
      - name: '🚧 Run sigrok-cli'
        run: |
          cd sigrok
          docker run --rm -tv $(pwd):/src -e CI umarcor/sigrok /src/test.sh
<reponame>najeebafzal/lm_riscv_dv
- test: cmark
  c_tests: directed_tests/c/cmark.c
  gcc_opts: -g -O3 -funroll-all-loops
  iterations: 1
  rtl_test: core_base_test_benchmark

- test: cmark_iccm
  c_tests: directed_tests/c/cmark_iccm.c
  gcc_opts: -g -O3 -funroll-all-loops
  iterations: 1
  rtl_test: core_base_test_benchmark

- test: c_sample
  c_tests: directed_tests/c/c_sample.c
  gcc_opts: -g -O3 -funroll-all-loops
  iterations: 1
  rtl_test: core_base_test_benchmark

- test: hello_world
  asm_tests: directed_tests/asm/hello_world.s
  iterations: 1
  rtl_test: core_base_test_benchmark

- test: hello_world_dccm
  asm_tests: directed_tests/asm/hello_world_dccm.s
  iterations: 1
  rtl_test: core_base_test_benchmark

- test: riscv_arithmetic_basic_test_0
  asm_tests: directed_tests/asm/riscv_sample_test/riscv_arithmetic_basic_test_0.S
  iterations: 1
  rtl_test: core_base_test_benchmark

- test: riscv_arithmetic_basic_test
  description: >
    Arithmetic instruction test, no load/store/branch instructions
  gen_opts: >
    +instr_cnt=1000
    +num_of_sub_program=0
    +directed_instr_0=riscv_int_numeric_corner_stream,4
    +no_fence=1
    +no_data_page=1
    +no_branch_jump=1
    +boot_mode=m
    +no_csr_instr=1
  iterations: 2
  gen_test: riscv_instr_base_test
  rtl_test: core_base_test

- test: riscv_load_store_instr_dccm_test
  description: >
    Random instruction stress test
  iterations: 1
  gen_test: riscv_instr_base_test
  gen_opts: >
    +instr_cnt=1000
    +num_of_sub_program=0
    +directed_instr_0=riscv_load_store_rand_instr_stream,4
  rtl_test: core_base_test

- test: riscv_rand_instr_test
  description: >
    Random instruction stress test
  iterations: 2
  gen_test: riscv_instr_base_test
  gen_opts: >
    +instr_cnt=10000
    +num_of_sub_program=5
    +directed_instr_0=riscv_load_store_rand_instr_stream,4
    +directed_instr_1=riscv_loop_instr,4
    +directed_instr_2=riscv_hazard_instr_stream,4
    +directed_instr_3=riscv_load_store_hazard_instr_stream,4
    +directed_instr_4=riscv_multi_page_load_store_instr_stream,4
    +directed_instr_5=riscv_mem_region_stress_test,4
    +directed_instr_6=riscv_jal_instr,4
  rtl_test: core_base_test

- test: riscv_jump_stress_test
  description: >
    Stress back-to-back jump instruction test
  iterations: 2
  gen_test: riscv_instr_base_test
  gen_opts: >
    +instr_cnt=5000
    +num_of_sub_program=5
    +directed_instr_1=riscv_jal_instr,20
  rtl_test: core_base_test

- test: riscv_loop_test
  description: >
    Random instruction stress test
  iterations: 2
  gen_test: riscv_instr_base_test
  gen_opts: >
    +instr_cnt=10000
    +num_of_sub_program=5
    +directed_instr_1=riscv_loop_instr,20
  rtl_test: core_base_test

- test: riscv_rand_jump_test
  description: >
    Jump among large number of sub-programs, stress testing iTLB operations.
  iterations: 2
  gen_test: riscv_instr_base_test
  gen_opts: >
    +instr_cnt=10000
    +num_of_sub_program=10
    +directed_instr_0=riscv_load_store_rand_instr_stream,8
  rtl_test: core_base_test
<gh_stars>0
ariane:
  incdirs: [
    include,
  ]
  files: [
    include/ariane_pkg.sv,
    include/nbdcache_pkg.sv,
    src/util/instruction_tracer_if.sv,
    src/util/instruction_tracer_pkg.sv,
    src/ariane.sv,
    src/alu.sv,
    src/branch_unit.sv,
    src/btb.sv,
    src/commit_stage.sv,
    src/compressed_decoder.sv,
    src/controller.sv,
    src/csr_buffer.sv,
    src/csr_regfile.sv,
    src/decoder.sv,
    src/ex_stage.sv,
    src/fetch_fifo.sv,
    src/fifo.sv,
    src/id_stage.sv,
    src/if_stage.sv,
    src/instr_realigner.sv,
    src/issue_read_operands.sv,
    src/issue_stage.sv,
    src/load_unit.sv,
    src/lsu.sv,
    src/lsu_arbiter.sv,
    src/mmu.sv,
    src/mult.sv,
    src/pcgen_stage.sv,
    src/ptw.sv,
    src/scoreboard.sv,
    src/store_buffer.sv,
    src/store_unit.sv,
    src/tlb.sv,
    src/debug_unit.sv,
    src/nbdcache.sv,
    src/miss_handler.sv,
    src/cache_ctrl.sv,
    src/perf_counters.sv,
  ]

riscv_regfile_rtl:
  targets: [
    rtl,
    gf22,
  ]
  incdirs: [
    include,
  ]
  files: [
    src/regfile.sv,
    src/util/gf22_sram.sv,
  ]

riscv_regfile_fpga:
  targets: [
    xilinx,
  ]
  incdirs: [
    include,
  ]
  files: [
    src/regfile_ff.sv,
    src/util/xilinx_sram.sv,
  ]
<gh_stars>10-100
derived_clks:
  clk_0:
    abspath: 'tb_i'
    gated_clk_req: 'clk_val_0'
    gated_clk: 'clk_0'
  clk_1:
    abspath: 'tb_i'
    gated_clk_req: 'clk_val_1'
    gated_clk: 'clk_1'
  osc_0:
    abspath: 'tb_i.osc_0'
    emu_clk: 'emu_clk'
    emu_rst: 'emu_rst'
    emu_dt: 'emu_dt'
    dt_req: 'dt_req'
  osc_1:
    abspath: 'tb_i.osc_1'
    emu_clk: 'emu_clk'
    emu_rst: 'emu_rst'
    emu_dt: 'emu_dt'
    dt_req: 'dt_req'
<reponame>ess-dmsc/dmg-build-scripts
---
- hosts: packet-generator
  gather_facts: False
  tasks:
    - name: start packet generator
      command: "{{daemonize_cmd}} {{script_path}}/pktgen_fpga_config/rundemo"
      args:
        chdir: "{{script_path}}/pktgen_fpga_config/"
      become: True
      become_method: sudo
      tags:
        - generator
<filename>conf/do_blink/rendering/zculling_bot.yaml
"zculling_bot":
  arch: "artix7_200t"
  device_family: "xc7a200t"
  device_name: "doblink-6-pblock-2"
  device_speed: "sbg484-1"
  device: "xc7a200t-doblink-6-pblock-2-roi-virt"
  board: "nexys_video"
  use_roi: "TRUE"
  timeout: 10000
language: c
compiler:
  - clang
  - gcc
before_install: sudo apt-get update
install: sudo apt-get install bc gnutls-bin valgrind perl
script:
  - cmake -D CMAKE_BUILD_TYPE:String="Check" .
  - make
  - make test
  - ( cd tests && ./compat.sh )
  - ( cd tests && ./ssl-opt.sh )
  - tests/scripts/test-ref-configs.pl
sudo: required
dist: cosmic

before_install:
  - sudo apt-get -qq update
  - sudo apt-get install -y --no-install-recommends texlive-latex-recommended texlive-fonts-recommended texlive-latex-extra texlive-bibtex-extra texlive-fonts-extra texlive-science
  - sudo apt-get install -y --no-install-recommends biber xzdec ghostscript graphviz

script:
  - source ./bin/conf.sh
  - make clean doc
# Copyright Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================
# Regression test list format
# --------------------------------------------------------------------------------
# test            : Assembly test name
# description     : Description of this test
# gen_opts        : Instruction generator options
# iterations      : Number of iterations of this test
# no_iss          : Enable/disable ISS simulator (Optional)
# gen_test        : Test name used by the instruction generator
# rtl_test        : RTL simulation test name
# cmp_opts        : Compile options passed to the instruction generator
# sim_opts        : Simulation options passed to the instruction generator
# no_post_compare : Enable/disable comparison of trace log and ISS log (Optional)
# compare_opts    : Options for the RTL & ISS trace comparison
# gcc_opts        : gcc compile options
# --------------------------------------------------------------------------------

- test: riscv_rand_test
  description: >
    Random test with all useful knobs
  gen_opts: >
    +instr_cnt=10000
    +num_of_sub_program=5
    +illegal_instr_ratio=5
    +hint_instr_ratio=5
    +stream_name_0=riscv_load_store_rand_instr_stream
    +stream_freq_0=4
    +stream_name_1=riscv_loop_instr
    +stream_freq_1=4
    +stream_name_2=riscv_hazard_instr_stream
    +stream_freq_2=4
    +stream_name_3=riscv_load_store_hazard_instr_stream
    +stream_freq_3=4
    +stream_name_4=riscv_mem_region_stress_test
    +stream_freq_4=4
    +stream_name_5=riscv_jal_instr
    +stream_freq_5=4
    +dist_control_mode=1
    +dist_shift=10
    +dist_arithmetic=10
    +dist_logical=10
    +dist_compare=10
    +dist_branch=10
    +dist_synch=10
    +dist_csr=10
  iterations: 1
  gcc_opts: >
    -mno-strict-align
  gen_test: riscv_ml_test
  rtl_test: core_base_test
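The commented block above documents the schema shared by these regression test lists. As a rough sketch of how a runner might consume one (simplified; not riscv-dv's actual flow), assuming PyYAML is available and `testlist.yaml` is a hypothetical file following this schema:

import yaml  # PyYAML, assumed available

with open("testlist.yaml") as f:
    testlist = yaml.safe_load(f)

jobs = []
for entry in testlist:
    if "import" in entry:  # nested testlists are referenced, not inlined
        continue
    # Collapse the folded gen_opts scalar into a single option string.
    opts = " ".join(entry.get("gen_opts", "").split())
    for i in range(entry.get("iterations", 1)):
        jobs.append({
            "name": f"{entry['test']}_{i}",
            "gen_test": entry.get("gen_test"),
            "rtl_test": entry.get("rtl_test"),
            "gen_opts": opts,
        })

print(f"{len(jobs)} simulation jobs")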
# Copyright 2021 ETH Zurich and University of Bologna.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
---
ssr:
  num_dm: 3
name: 'coverage'

on:
  push:
  schedule:
    - cron: '0 0 * * 5'

jobs:
  coverage:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
        with:
          submodules: recursive
      - uses: actions/setup-python@v2
        with:
          python-version: '3.10'
      - name: Run coverage
        run: |
          ./.github/run.sh tox -e coverage
          ./.github/run.sh coverage html --directory=htmlcov
      - name: Report coverage
        run: ./.github/run.sh coverage report -m --skip-covered
      - uses: actions/upload-artifact@v2
        with:
          name: VUnit_coverage
          path: htmlcov
language: cpp
sudo: required
dist: trusty
before_install:
  - source travis-scripts/install-clutils.sh
  - source travis-scripts/install-savant.sh
script: source travis-scripts/run.sh
# Copyright 2020 ETH Zurich
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

package:
  name: riscv

dependencies:
  fpnew: { git: "https://github.com/pulp-platform/fpnew.git", version: 0.6.1 }
  common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.16.4 }
  tech_cells_generic: { git: "https://github.com/pulp-platform/tech_cells_generic.git", version: 0.1.1 }

sources:
  include_dirs:
    - rtl/include
  files:
    - rtl/include/cv32e40p_apu_core_pkg.sv
    - rtl/include/cv32e40p_pkg.sv
    - rtl/cv32e40p_alu.sv
    - rtl/cv32e40p_alu_div.sv
    - rtl/cv32e40p_aligner.sv
    - rtl/cv32e40p_compressed_decoder.sv
    - rtl/cv32e40p_controller.sv
    - rtl/cv32e40p_cs_registers.sv
    - rtl/cv32e40p_decoder.sv
    - rtl/cv32e40p_int_controller.sv
    - rtl/cv32e40p_ex_stage.sv
    - rtl/cv32e40p_hwloop_controller.sv
    - rtl/cv32e40p_hwloop_regs.sv
    - rtl/cv32e40p_id_stage.sv
    - rtl/cv32e40p_if_stage.sv
    - rtl/cv32e40p_load_store_unit.sv
    - rtl/cv32e40p_mult.sv
    - rtl/cv32e40p_prefetch_buffer.sv
    - rtl/cv32e40p_obi_interface.sv
    - rtl/cv32e40p_core.sv
    - rtl/cv32e40p_apu_disp.sv
    - rtl/cv32e40p_fetch_fifo.sv
    - rtl/cv32e40p_popcnt.sv
    - rtl/cv32e40p_ff_one.sv
    - rtl/cv32e40p_sleep_unit.sv
    - target: asic
      files:
        - rtl/cv32e40p_register_file_latch.sv
    - target: not(asic)
      files:
        - rtl/cv32e40p_register_file_ff.sv
    - target: rtl
      files:
        - bhv/cv32e40p_sim_clock_gate.sv
<reponame>ucberkeley-ee290c/chipyard-osci-sky130
# General Hammer Inputs

# Placement Constraints
vlsi.inputs.placement_constraints:
  - path: "Tile"
    type: toplevel
    x: 0
    y: 0
    width: 5000
    height: 5000
    margins:
      left: 0
      right: 0
      top: 0
      bottom: 0

vlsi.inputs.dont_use_mode: append
vlsi.inputs.dont_use_list: ["*sdf*"] # Scan flops going haywire!

# Technology Setup
# Technology used is Skywater
vlsi.core.technology: sky130
vlsi.core.node: 130
technology.sky130.sky130_pdk: "/tools/commercial/skywater/skywater-pdk"
technology.sky130.sky130_nda: "/tools/commercial/skywater/swtech130/skywater-src-nda"
technology.sky130.sky130A: "/tools/commercial/skywater/swtech130/local/sky130A/"
technology.sky130.open_pdks: "/tools/commercial/skywater/swtech130/local/open_pdks"
technology.sky130.sram_lib: "/tools/B/nayiri/sky130/skywater-pdk/libraries/sky130_fd_bd_sram"
# technology.pdk_root: "/tools/B/nayiri/skywater/skywater-pdk"
# technology.pdk_home: "/tools/commercial/skywater/swtech130/skywater-src-nda/s8/V2.0.1"

vlsi.core.max_threads: 4

# General Hammer Inputs
vlsi.inputs:
  supplies:
    power: [{name: "VDD", pin: "VPWR"}]
    ground: [{name: "VSS", pin: "VGND"}]
    VDD: "1.8 V"
    GND: "0 V"

# Hammer will auto-generate a CPF for simple power designs; see hammer/src/hammer-vlsi/defaults.yml for more info
vlsi.inputs.power_spec_mode: "auto"
vlsi.inputs.power_spec_type: "cpf"

# Specify the setup and hold corners for Skywater
vlsi.inputs.mmmc_corners: [
  {name: "sky130_fd_sc_hd__ss_100C_1v60", type: "setup", voltage: "1.60 V", temp: "100 C"},
  {name: "sky130_fd_sc_hd__ff_n40C_1v95", type: "hold", voltage: "1.95 V", temp: "-40 C"}
]

# Specify clock signals
# ASAP7 bug: period value should actually be in ps
vlsi.inputs.clocks: [
  {name: "clock", period: "1000ns", uncertainty: "0.1ns"}
]

# Power Straps
par.power_straps_mode: generate
par.generate_power_straps_method: by_tracks
par.blockage_spacing: 2.0
par.generate_power_straps_options:
  by_tracks:
    strap_layers:
      - met2
      - met3
      - met4
      - met5
    pin_layers:
      - met4
      - met5
    track_width: 1
    track_spacing: 0
    track_start: 10
    power_utilization: 0.2

# Pin placement constraints
vlsi.inputs.pin_mode: generated
vlsi.inputs.pin.generate_mode: semi_auto
vlsi.inputs.pin.assignments: [
  {pins: "*", layers: ["met3", "met5"], side: "left"}
]

# Voltus options
vlsi.core.power_tool: "voltus"
vlsi.core.power_tool_path: ["hammer-cadence-plugins/power"]
vlsi.core.power_tool_path_meta: "append"
power.voltus.version: "181"

# Tool options. Replace with your tool plugin of choice.
# Genus options
#vlsi.core.synthesis_tool: "dc"
vlsi.core.synthesis_tool: "genus"
vlsi.core.synthesis_tool_path: ["hammer-cadence-plugins/synthesis"]
vlsi.core.synthesis_tool_path_meta: "append"
synthesis.genus.version: "1813" #o-"191"

# Innovus options
vlsi.core.par_tool: "innovus"
vlsi.core.par_tool_path: ["hammer-cadence-plugins/par"]
vlsi.core.par_tool_path_meta: "append"
par.innovus.version: "191"
par.innovus.design_flow_effort: "standard"
par.innovus.floorplan_mode: "auto"
par.inputs.gds_merge: true

# Calibre options
vlsi.core.drc_tool: "calibre"
vlsi.core.drc_tool_path: ["hammer-mentor-plugins/drc"]
vlsi.core.lvs_tool: "calibre"
vlsi.core.lvs_tool_path: ["hammer-mentor-plugins/lvs"]
# drc.calibre.calibre_drc_bin: "/tools/mentor/calibre/current/bin/calibre"

# VCS options
vlsi.core.sim_tool: "vcs"
vlsi.core.sim_tool_path: ["hammer-synopsys-plugins/sim"]
vlsi.core.sim_tool_path_meta: "append"
sim.vcs.version: "P-2019.06-SP2-5" #o-"G-2012.09"

# Generate Make include to aid in flow
vlsi.core.build_system: make
<reponame>EgorVorontsov/corsair<gh_stars>0
regmap:
  read_filler: '0xdeadbeef'
  address_increment_mode: none
  address_increment_value: 4
  address_alignment_mode: data_width
  address_alignment_value: 4
lb_bridge:
  type: none
docs: {}
data_width: 32
address_width: 32
name: regs
version: '1.0'
register_reset: sync_pos
language: python
dist: trusty
sudo: required
group: deprecated-2017Q4 # with new we have some random errors
python: 3.6

install:
  - sudo sh scripts/install_ghdl.sh
  - source ~/.bashrc
  - pip install .
  # only for CI
  - pip install coveralls

script:
  - coverage run --source pyha_demo_project -m pytest -s --nbval --nbdime --sanitize-with nbval_sanitize.cfg

after_success:
  - coveralls
name: Wheels

on:
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * *'
  release:
    types:
      - published

jobs:
  build_wheels:
    name: Wheels on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]

    steps:
      - uses: actions/checkout@v2
      - name: Get Surelog version
        id: get-surelog
        run: |
          echo "::set-output name=version::$(git rev-parse HEAD:third_party/tools/surelog)"
      - uses: actions/cache@v2
        id: surelog-cache
        if: matrix.os != 'ubuntu-latest'
        with:
          path: |
            siliconcompiler/tools/surelog/bin/surelog*
            siliconcompiler/tools/surelog/lib/surelog/sv/builtin.sv
          key: ${{ matrix.os }}-${{ steps.get-surelog.outputs.version }}
      - name: Setup env (Windows)
        if: matrix.os == 'windows-latest'
        run: |
          choco install -y graphviz winflexbison3
          vcpkg install zlib zlib:x64-windows
          .github/workflows/bin/install_klayout_win.bat
      - name: Setup env (macOS)
        if: matrix.os == 'macos-latest'
        run: |
          brew install graphviz
          brew install --cask klayout
          # https://github.com/ponty/PyVirtualDisplay/blob/master/.github/workflows/main.yml#L45
          brew install --cask xquartz
          echo "/opt/X11/bin" >> $GITHUB_PATH
          mkdir /tmp/.X11-unix
          sudo chmod 1777 /tmp/.X11-unix
          sudo chown root /tmp/.X11-unix
      - name: Build Surelog (Windows)
        if: matrix.os == 'windows-latest' && steps.surelog-cache.outputs.cache-hit != 'true'
        run: .github/workflows/bin/install_surelog_win.bat
      - name: Build Surelog (macOS)
        if: matrix.os == 'macos-latest' && steps.surelog-cache.outputs.cache-hit != 'true'
        run: .github/workflows/bin/install_surelog_macos.sh
      - uses: pypa/cibuildwheel@v2.1.1
        env:
          CIBW_BEFORE_ALL_LINUX: >
            export PREFIX={package}/siliconcompiler/tools/surelog &&
            {package}/.github/workflows/bin/setup_wheel_env_linux.sh
          CIBW_ENVIRONMENT_WINDOWS: SC_CMAKEARGS="-DCMAKE_TOOLCHAIN_FILE=$VCPKG_INSTALLATION_ROOT/scripts/buildsystems/vcpkg.cmake."
          CIBW_MANYLINUX_X86_64_IMAGE: manylinux2014
          CIBW_SKIP: "pp* *win32 *i686"
          MACOSX_DEPLOYMENT_TARGET: "10.15"
          CIBW_ARCHS_MACOS: x86_64 arm64
          CIBW_TEST_SKIP: "*_arm64"
          CIBW_TEST_EXTRAS: test
          CIBW_TEST_COMMAND: >
            pytest --import-mode=append {package}/tests/ -m "not eda" &&
            pytest --import-mode=append {package}/tests/tools/test_surelog.py &&
            pytest --import-mode=append {package}/tests/core/test_show.py
      - name: Verify clean directory
        run: git diff --exit-code
        shell: bash
      - name: Upload wheels
        uses: actions/upload-artifact@v2
        with:
          path: wheelhouse/*.whl

  publish:
    needs: [build_wheels]
    runs-on: ubuntu-latest
    if: github.event_name == 'release' && github.event.action == 'published' && !contains(github.event.release.body, 'NOPUBLISH')
    steps:
      - uses: actions/download-artifact@v2
        with:
          name: artifact
          path: dist
      - uses: pypa/gh-action-pypi-publish@v1.4.2
        with:
          user: __token__
          password: ${{ secrets.PYPI_DEPLOY }}
<gh_stars>1-10
axi_slice:
  files: [
    axi_ar_buffer.sv,
    axi_aw_buffer.sv,
    axi_b_buffer.sv,
    axi_buffer.sv,
    axi_r_buffer.sv,
    axi_slice.sv,
    axi_w_buffer.sv,
  ]
<reponame>SubjeBilisim/anasymod
PROJECT:
  dt: 0.1e-6
  board_name: PYNQ_Z1
  plugins: ['msdsl']
  emu_clk_freq: 10e6

FPGA_TARGET:
  fpga:
    fpga_sim_ctrl: 'UART_ZYNQ'
    custom_zynq_firmware: True
<filename>.github/workflows/main.yml
name: CI

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  build-linux:
    runs-on: ubuntu-latest
    strategy:
      max-parallel: 5

    steps:
      - uses: actions/checkout@v3
      - name: Set up Python 3.8.1
        uses: actions/setup-python@v3
        with:
          python-version: 3.8.1
      - name: Add conda to system path
        run: |
          echo $CONDA/bin >> $GITHUB_PATH
      - name: Install dependencies
        run: |
          conda env update --file environment.yml --name base
      - name: synthesis
        run: |
          cd validation/
          conda install -c conda-forge gxx
          yosys-config --build cg_plugin.so clock_gating_plugin.cc
          python3 synth_test.py
      - name: validation
        run: |
          cd validation/
          python3 validate.py
<filename>examples/stream_monitor/stream_monitor.mmio.yaml
metadata:
  name: stream_monitor
  doc: |
    This register file can be used as an interface for monitoring the
    performance of a stream.

features:
  bus-width: 32
  optimize: yes

entity:
  clock-name: axil_aclk
  reset-name: axil_aresetn
  reset-active: low
  bus-prefix: axil_
  bus-flatten: yes

interface:
  flatten: yes

interrupts:
  - mnemonic: OVF
    name: overflow
    brief: counter overflow interrupt.
    doc: |
      This interrupt is triggered when one of the counters overflows.
    internal: overflow

  - mnemonic: VAL
    name: valid
    brief: stream valid interrupt.
    doc: |
      This interrupt is active when the stream is valid.
    internal: valid
    active: high

  - mnemonic: INV
    name: invalid
    brief: stream invalid interrupt.
    doc: |
      This interrupt is active when the stream is invalid.
    internal: valid
    active: low

  - mnemonic: RDY
    name: ready
    brief: stream ready interrupt.
    doc: |
      This interrupt is active when the stream is ready.
    internal: ready
    active: high

  - mnemonic: BSY
    name: busy
    brief: stream busy interrupt.
    doc: |
      This interrupt is active when the stream is not ready/busy.
    internal: ready
    active: low

fields:
  - address: 0x0000
    register-mnemonic: CSR
    register-name: ctrl_status_reg
    register-brief: control/status register.
    subfields:
      - bitrange: 0
        mnemonic: VAL
        name: valid
        brief: reflects the stream's current valid signal.
        behavior: internal-status
        internal: valid
      - bitrange: 1
        mnemonic: RDY
        name: ready
        brief: reflects the stream's current ready signal.
        behavior: internal-status
        internal: ready
      - bitrange: 8
        mnemonic: OVF
        name: overflow
        brief: overflow flag.
        doc: |
          Set when one of the counters overflows. Write one to reset.
        behavior: interrupt-flag
        interrupt: overflow
      - bitrange: 9
        mnemonic: OEN
        name: overflow_irq_enable
        brief: overflow interrupt enable flag.
        doc: |
          Whether a counter overflow triggers an interrupt.
        behavior: interrupt-unmask
        interrupt: overflow
      - bitrange: 10
        mnemonic: VEN
        name: valid_irq_enable
        brief: stream valid interrupt enable flag.
        doc: |
          Whether the stream being valid triggers an interrupt.
        behavior: interrupt-unmask
        interrupt: valid
      - bitrange: 11
        mnemonic: IEN
        name: invalid_irq_enable
        brief: stream invalid interrupt enable flag.
        doc: |
          Whether the stream being invalid triggers an interrupt.
        behavior: interrupt-unmask
        interrupt: invalid
      - bitrange: 12
        mnemonic: REN
        name: ready_irq_enable
        brief: stream ready interrupt enable flag.
        doc: |
          Whether the stream being ready triggers an interrupt.
        behavior: interrupt-unmask
        interrupt: ready
      - bitrange: 13
        mnemonic: BEN
        name: busy_irq_enable
        brief: stream busy interrupt enable flag.
        doc: |
          Whether the stream being busy/not ready triggers an interrupt.
        behavior: interrupt-unmask
        interrupt: busy

  - address: 0x0004
    mnemonic: VCNT
    name: valid_count
    brief: stream valid cycle counter.
    doc: |
      Increments each cycle that the stream is valid. Writing to the
      register subtracts the written value.
    behavior: internal-counter
    internal: valid
    overflow-internal: overflow

  - address: 0x0008
    mnemonic: RCNT
    name: ready_count
    brief: stream ready cycle counter.
    doc: |
      Increments each cycle that the stream is ready. Writing to the
      register subtracts the written value.
    behavior: internal-counter
    internal: ready
    overflow-internal: overflow

  - address: 0x000C
    mnemonic: ECNT
    name: element_count
    brief: stream element counter.
    doc: |
      Accumulates the number of elements transferred on the stream.
    behavior: counter
    hw-write: accumulate
    ctrl-increment: no

internal-io:
  - direction: input
    internal: valid
  - direction: input
    internal: ready
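Per the field docs above, VCNT (0x0004) and RCNT (0x0008) count valid and ready cycles, and writing a value subtracts it from the counter. Host software can therefore sample and reset both counters in one pass. A minimal Python sketch follows; the read32/write32 accessors are assumptions standing in for whatever bus access the host actually has, not part of vhdmmio's generated output.

VCNT, RCNT = 0x0004, 0x0008  # register offsets from the map above

def sample_counters(read32, write32):
    """Read both cycle counters, then write back exactly what was read,
    which subtracts it (per the docs above) so the next sample starts
    near zero without losing cycles that arrive between the accesses."""
    valid = read32(VCNT)
    ready = read32(RCNT)
    write32(VCNT, valid)
    write32(RCNT, ready)
    return valid, ready

Reading first and writing back the read value (rather than writing a fixed constant) is what makes the sample window lossless under the write-to-subtract semantics.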
<reponame>PRETgroup/modular-code-generation
SA:
  type: Cell
  parameters:
    C14: 20
    autorhythmic_rate: 100
BB:
  type: Cell
  parameters:
    C14: 20
LA:
  type: Cell
  parameters:
    C14: 20
LA1:
  type: Cell
  parameters:
    C14: 20
RA:
  type: Cell
  parameters:
    C14: 20
RA1:
  type: Cell
  parameters:
    C14: 20
CS:
  type: Cell
  parameters:
    C14: 20
CT:
  type: Cell
  parameters:
    C14: 20
CT1:
  type: Cell
  parameters:
    C14: 20
OS:
  type: Cell
  parameters:
    C14: 20
Fast:
  type: Cell
  parameters:
    C14: 18
Fast1:
  type: Cell
  parameters:
    C14: 18
Slow:
  type: Cell
  parameters:
    C14: 20
Slow1:
  type: Cell
  parameters:
    C14: 20
AV:
  type: Cell
  parameters:
    C14: 20
    autorhythmic_rate: 60
His:
  type: Cell
  parameters:
    C14: 11
His1:
  type: Cell
  parameters:
    C14: 11
His2:
  type: Cell
  parameters:
    C14: 11
LBB:
  type: Cell
  parameters:
    C14: 11
    autorhythmic_rate: 32
LBB1:
  type: Cell
  parameters:
    C14: 11
    autorhythmic_rate: 32
LVA:
  type: Cell
  parameters:
    C14: 11
LV:
  type: Cell
  parameters:
    C14: 11
LV1:
  type: Cell
  parameters:
    C14: 11
LVS:
  type: Cell
  parameters:
    C14: 11
LVS1:
  type: Cell
  parameters:
    C14: 11
CSLV:
  type: Cell
  parameters:
    C14: 11
RBB:
  type: Cell
  parameters:
    C14: 11
    autorhythmic_rate: 32
RBB1:
  type: Cell
  parameters:
    C14: 11
    autorhythmic_rate: 32
RVA:
  type: Cell
  parameters:
    C14: 11
RV:
  type: Cell
  parameters:
    C14: 11
RV1:
  type: Cell
  parameters:
    C14: 11
RVS:
  type: Cell
  parameters:
    C14: 11
RVS1:
  type: Cell
  parameters:
    C14: 11
<gh_stars>10-100
# Copyright Google LLC
# Copyright 2020 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

- tool: vcs
  compile:
    cmd:
      - "vcs -full64 -LDFLAGS '-Wl,--no-as-needed' -assert svaext -sverilog +error+500 <cov_opts> -timescale=1ns/10ps -f SweRV_EH1_flist.f -Mdir=<out>/vcs_simv.csrc -o <out>/vcs_simv -l <out>/compile.log -lca -kdb <cmp_opts> <wave_opts>"
    wave_opts: >
      -debug_access+all -ucli -do waves.tcl
    cov_opts: >
      -cm line+tgl+branch -cm_hier cover.cfg -cm_dir <out>/test.vdb
  sim:
    cmd: >
      env SIM_DIR=<sim_dir> <out>/vcs_simv +vcs+lic+wait <sim_opts> <wave_opts> <cov_opts>
      +tracer_file_base=<sim_dir>/trace_core -l <sim_dir>/sim.log
    wave_opts: >
      -ucli -do <cwd>/waves.tcl
    cov_opts: >
      -cm line+tgl+branch -cm_name test_<test_name>_<iteration> -cm_dir <out>/test.vdb

- tool: verilator
  compile:
    cmd:
      - "verilator --cc -CFLAGS \"-std=c++11\" -Wno-UNOPTFLAT -I/testbench -f flist_verilator.f --top-module tb_top -exe test_tb_top.cpp --autoflush --trace -f testbench_veri.f"
      - "cp ./testbench/test_tb_top.cpp obj_dir"
      - "make -C obj_dir -f Vtb_top.mk OPT_FAST=\"-O2\""
  sim:
    cmd: >
      ./obj_dir/Vtb_top +dumpon >><sim_dir>/sim.log
      env SIM_DIR=<sim_dir> <sim_opts> +tracer_file_base=<sim_dir>/trace_core -l <sim_dir>/sim.log
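The angle-bracket tokens in the command strings above (<out>, <sim_dir>, <cov_opts>, and so on) are placeholders that the run scripts fill in per test before invoking the tool. A simplified Python sketch of that substitution step, illustrative only and not the actual riscv-dv/SweRV script:

import re

def subst(cmd: str, values: dict) -> str:
    """Replace <key> placeholders with values, leaving unknown keys intact."""
    return re.sub(r"<(\w+)>",
                  lambda m: str(values.get(m.group(1), m.group(0))),
                  cmd)

sim_cmd = "env SIM_DIR=<sim_dir> <out>/vcs_simv +vcs+lic+wait -l <sim_dir>/sim.log"
print(subst(sim_cmd, {"sim_dir": "out/run0", "out": "out"}))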
# Adapted from Garnet and ButterPHY

name: output_buffer

commands:
  - |
    mkdir -p outputs
    tar -xzvf /home/sjkim85/dragonphy_tarballs/output_buffer-latest.tar.gz -C outputs
    python comment_fill_bound_tap.py outputs/output_buffer.lvs.v
    mv outputs/output_buffer_alt.lvs.v outputs/output_buffer.lvs.v

outputs:
  - output_buffer.lef
  - output_buffer.gds
  - output_buffer.lvs.v
  - output_buffer.version
timer_unit:
  incdirs: [
    rtl,
  ]
  files: [
    ./rtl/apb_timer_unit.sv,
    ./rtl/timer_unit.sv,
    ./rtl/timer_unit_counter.sv,
    ./rtl/timer_unit_counter_presc.sv,
  ]
  jg_slint_top_name: [ apb_timer_unit ]
  jg_slint_elab_opt: [ ]
  jg_slint_postelab_cmds: [ ]
  jg_slint_clocks: [
    HCLK,
  ]
  jg_slint_resets: [
    ~HRESETn,
  ]
<filename>emulator/.travis.yml
language: cpp

addons:
  apt:
    sources:
      - ubuntu-toolchain-r-test
    packages:
      - g++-5
      - gcc-5

compiler:
  - gcc
  - clang

os:
  - linux
  - osx

matrix:
  exclude:
    - os: osx
      compiler: gcc
    - os: linux
      compiler: clang

env:
  - SUBTARGET=tiny MAME=mametiny64

script:
  - if [ $TRAVIS_OS_NAME == 'linux' ]; then if [ $CC == 'clang' ]; then make -j2 IGNORE_GIT=1 OVERRIDE_CXX="clang++-3.6" OVERRIDE_CC="clang-3.6" && ./$MAME -validate; else make -j4 IGNORE_GIT=1 OPTIMIZE=0 OVERRIDE_CC="gcc-5" OVERRIDE_CXX="g++-5" && ./$MAME -validate; fi elif [ $TRAVIS_OS_NAME == 'osx' ]; then unset LDOPTS && make -j2 OPTIMIZE=0 USE_LIBSDL=1 && ./$MAME -validate; fi

sudo: required

before_install:
  - "if [ ${TRAVIS_OS_NAME:-'linux'} = 'linux' ]; then sudo add-apt-repository ppa:zoogie/sdl2-snapshots -y; fi"
  - "if [ ${TRAVIS_OS_NAME:-'linux'} = 'linux' ]; then sudo add-apt-repository ppa:beineri/opt-qt551-trusty -y; fi"
  - "if [ ${TRAVIS_OS_NAME:-'linux'} = 'linux' ]; then sudo apt-get update -qq; fi"
  - "if [ ${TRAVIS_OS_NAME:-'linux'} = 'linux' ]; then sudo apt-get install -y --force-yes libsdl2-dev libsdl2-ttf-dev libasound2-dev libxinerama-dev qt55base qt55quickcontrols qt55declarative qt55tools qt55svg; fi"
  - "if [ ${TRAVIS_OS_NAME:-'linux'} = 'linux' ]; then source /opt/qt55/bin/qt55-env.sh; fi"
  - "if [ ${TRAVIS_OS_NAME:-'linux'} = 'osx' ]; then brew update; fi"
  - "if [ ${TRAVIS_OS_NAME:-'linux'} = 'osx' ]; then brew install sdl2 sdl2_ttf; fi"

branches:
  only:
    - master

notifications:
  email: false
<filename>Bender.yml<gh_stars>0
package:
  name: FPnew
  authors: ["<NAME> <<EMAIL>>"]

dependencies:
  common_cells: {git: "https://github.com/pulp-platform/common_cells.git", version: 1.7.5}
  fpu_legacy: {git: "<EMAIL>:sasa/fpu.git", rev: master}

sources:
  - src/pkg/fpnew_pkg.vhd
  - src/pkg/fpnew_fmts_pkg.vhd
  - src/pkg/fpnew_comps_pkg.vhd
  - src/pkg/fpnew_pkg_constants.vhd
  - src/utils/fp_arbiter.vhd
  - src/utils/fp_pipe.vhd
  - src/utils/fp_rounding.vhd
  - src/ops/fma_core.vhd
  - src/ops/fp_fma.vhd
  - src/ops/fp_divsqrt_multi.vhd
  - src/ops/fp_noncomp.vhd
  - src/ops/fp_f2fcasts.vhd
  - src/ops/fp_f2icasts.vhd
  - src/ops/fp_i2fcasts.vhd
  - src/ops/fp_conv_multi.vhd
  - src/subunits/addmul_fmt_slice.vhd
  - src/subunits/addmul_block.vhd
  - src/subunits/conv_multifmt_slice.vhd
  - src/subunits/conv_block.vhd
  - src/subunits/divsqrt_block.vhd
  - src/subunits/divsqrt_multifmt_slice.vhd
  - src/subunits/noncomp_fmt_slice.vhd
  - src/subunits/noncomp_block.vhd
  - src/fpnew.vhd
  - src/fpnew_top.vhd
# Copyright Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# ================================================================================
# Regression test list format
# --------------------------------------------------------------------------------
# test            : Assembly test name
# description     : Description of this test
# gen_opts        : Instruction generator options
# iterations      : Number of iterations of this test
# no_iss          : Enable/disable ISS simulator (Optional)
# gen_test        : Test name used by the instruction generator
# asm_tests       : Path to directed, hand-coded assembly test file or directory
# rtl_test        : RTL simulation test name
# cmp_opts        : Compile options passed to the instruction generator
# sim_opts        : Simulation options passed to the instruction generator
# no_post_compare : Enable/disable comparison of trace log and ISS log (Optional)
# compare_opts    : Options for the RTL & ISS trace comparison
# gcc_opts        : gcc compile options
# --------------------------------------------------------------------------------

- import: <riscv_dv_root>/yaml/base_testlist.yaml

- test: riscv_non_compressed_instr_test
  description: >
    Random instruction test without compressed instructions
  iterations: 1
  gen_test: riscv_rand_instr_test
  gen_opts: >
    +disable_compressed_instr=1
  rtl_test: core_base_test

- test: riscv_hint_instr_test
  description: >
    HINT instruction test: verify that the processor detects a HINT
    instruction and treats it as a NOP. No illegal instruction exception is
    expected.
  iterations: 2
  gen_test: riscv_rand_instr_test
  gen_opts: >
    +hint_instr_ratio=5
  rtl_test: core_base_test

- test: riscv_pmp_test
  description: >
    Provide some PMP configuration parameters and set up the PMP CSRs
    appropriately
  iterations: 2
  gen_test: riscv_rand_instr_test
  gen_opts: >
    +pmp_randomize=0
    +pmp_num_regions=1
    +pmp_granularity=1
    +pmp_region_0=L:0,A:TOR,X:1,W:1,R:1,ADDR:FFFFFFFF
  rtl_test: core_base_test
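The format header also documents directed-test fields (asm_tests, gcc_opts, no_iss) that none of the entries above exercise; a minimal sketch of such an entry follows, with a hypothetical test name and assembly path:

# Hedged example only -- the test name and asm_tests path are hypothetical.
- test: riscv_sample_directed_test
  description: >
    Directed, hand-coded assembly test illustrating the asm_tests field
  iterations: 1
  asm_tests: <riscv_dv_root>/asm_tests/example_test.S
  rtl_test: core_base_test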
<filename>docker-compose.yml # Docker Compose file Reference (https://docs.docker.com/compose/compose-file/) version: '3' # Define services services: # App Service bp: # Configuration for building the docker image for the service build: context: . # Use an image built from the specified dockerfile in the current directory. dockerfile: Dockerfile volumes: - .:/home/build/black-parrot
language: c os: - linux # - osx matrix: include: - os: linux env: VALGRIND="/usr/bin/valgrind --leak-check=full --error-exitcode=1" compiler: gcc - os: osx env: VALGRIND="" compiler: clang # compiler: # - clang # - gcc before_install: - if [ "$TRAVIS_OS_NAME" = "osx" ]; then brew update; fi - if [ "$TRAVIS_OS_NAME" = "osx" ]; then brew install doxygen; fi - if [ "$TRAVIS_OS_NAME" = "linux" ]; then sudo apt-get install -y valgrind; fi - if [ "$TRAVIS_OS_NAME" = "linux" ]; then sudo apt-get install -y doxygen; fi script: - make clean dev - make clean all - make docs after_failure: - echo "---- FULL LOGS ----" - cat tests/tests.log
<filename>blocks/regaccess/test/test_regaccess_demux.manifest.yaml module: test_regaccess_demux sources: - ../../../interfaces/common/dii_channel.sv - ../common/osd_regaccess_demux.sv toplevel: osd_regaccess_demux simulators: - vcs
<filename>examples/red-pitaya/spectrum/config.yml<gh_stars>10-100 --- name: spectrum board: boards/red-pitaya version: 0.1.2 memory: - name: control offset: '0x60000000' range: 4K - name: status offset: '0x50000000' range: 4K protection: read - name: dac[n_dac] offset: '0x80000000' range: 8K - name: spectrum offset: '0x86000000' range: 16K - name: demod offset: '0x88000000' range: 16K - name: noise_floor offset: '0x8A000000' range: 16K - name: peak_fifo offset: '0x43C10000' range: 64K - name: gpio offset: '0x41200000' range: 64K - name: xadc offset: '0x43C00000' range: 64K - name: rambuf offset: '0x1E000000' range: 8M control_registers: - led - laser_current - laser_control - power_setpoint - addr - substract_mean - ctl_fft - avg - dac_period[n_dac] - peak_address_low - peak_address_high - peak_address_reset - avg_period - avg_threshold - n_avg_min - eeprom_ctl status_registers: - spi_out - pid_control - n_avg - peak_address - peak_maximum - avg_ready - avg_on_out - eeprom_sts parameters: fclk0: 200000000 fclk1: 166666667 sampling_rate: 125000000 wfm_size: 4096 dac_width: 14 adc_width: 14 pwm_width: 12 n_pwm: 4 n_dac: 2 # ------------------------------------------------------------ # FPGA # ------------------------------------------------------------ modules: - fpga/modules/address/config.yml - fpga/modules/averager/config.yml - fpga/modules/spectrum/config.yml - fpga/modules/peak_detector/config.yml cores: - fpga/cores/axi_ctl_register_v1_0 - fpga/cores/axi_sts_register_v1_0 - fpga/cores/dna_reader_v1_0 - fpga/cores/redp_adc_v1_0 - fpga/cores/redp_dac_v1_0 - fpga/cores/pdm_v1_0 - fpga/cores/bus_multiplexer_v1_0 - fpga/cores/latched_mux_v1_0 - fpga/cores/saturation_v1_0 - fpga/cores/at93c46d_spi_v1_0 xdc: - boards/red-pitaya/config/ports.xdc - boards/red-pitaya/config/clocks.xdc - ./expansion_connector.xdc # ------------------------------------------------------------ # Drivers # ------------------------------------------------------------ drivers: - server/drivers/common.hpp - server/drivers/xadc.hpp - server/drivers/laser.hpp - server/drivers/modulation.hpp - server/drivers/eeprom.hpp - ./spectrum.hpp # ------------------------------------------------------------ # Web # ------------------------------------------------------------ web: - web/koheron.ts - web/jquery.flot.d.ts - web/laser.ts - web/laser-control.html - ./web/spectrum.ts - ./web/app.ts - web/main.css - web/average.ts - web/average.html - web/modulation.ts - web/modulation.html - ./web/plot.ts - web/plot-basics/plot-basics.ts - web/plot-basics/plot-basics.html - ./web/index.html
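# Note (assumption based on the bracket syntax and the parameters block):
# array-style entries such as dac[n_dac] and dac_period[n_dac] appear to be
# expanded by the build using the parameter values, e.g. n_dac: 2 yielding
# dac0/dac1 memory regions and dac_period0/dac_period1 registers.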
name: Linux Wheel Deploy on: push: tags: - '*' jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Checkout submodules shell: bash run: | auth_header="$(git config --local --get http.https://github.com/.extraheader)" git submodule sync --recursive git -c "http.extraheader=$auth_header" -c protocol.version=2 submodule update --init --force --recursive --depth=1 - name: Build wheel and test wheel shell: bash env: BUILD_WHEEL: true OS: linux PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }} run: | source ./scripts/ci.sh - name: Build and deploy other wheels shell: bash env: BUILD_WHEEL: true OS: linux run: | source ./scripts/deploy.sh - uses: actions/upload-artifact@v1 with: name: Python wheels path: wheelhouse/
<reponame>riscveval/Pulpino
#
# List of IPs and their respective branch/commit-hash/tag.
# Uses the YAML syntax.
#
# Examples:
#
# or10n:
#   commit: tags/PULP3_final
#   domain: [cluster]
# udma:
#   commit: 62b10440
#   domain: [soc]
# axi_slice:
#   commit: master
#   domain: [soc,cluster]
# If a *tag* or *commit* is referenced, the IP will be in a
# state of DETACHED HEAD. Before committing any additional
# work, make sure to check out a branch.
#

# APB IPs
apb/apb_node:
  commit: master
apb/apb_event_unit:
  commit: master
apb/apb_fll_if:
  commit: master
apb/apb_gpio:
  commit: master
apb/apb_i2c:
  commit: master
apb/apb_pulpino:
  commit: master
apb/apb_spi_master:
  commit: master
apb/apb_timer:
  commit: master
apb/apb_uart:
  commit: master
apb/apb2per:
  commit: master

# AXI IPs
axi/axi2apb:
  commit: master
axi/axi_mem_if_DP:
  commit: master
axi/axi_node:
  commit: master
axi/axi_slice:
  commit: master
axi/axi_slice_dc:
  commit: master
axi/axi_spi_master:
  commit: master
axi/axi_spi_slave:
  commit: master
axi/core2axi:
  commit: master

adv_dbg_if:
  commit: master
riscv:
  commit: master
  alternatives: [or10n,riscv]
#or10n:
#  commit: master
#  alternatives: [or10n,riscv]
--- additions: - kind: dns#resourceRecordSet name: vrp2.ctfcompetition.com. rrdatas: - ns-cloud-c1.googledomains.com. cloud-dns-hostmaster.google.com. 2 21600 3600 259200 300 ttl: 21600 type: SOA deletions: - kind: dns#resourceRecordSet name: vrp2.ctfcompetition.com. rrdatas: - ns-cloud-c1.googledomains.com. cloud-dns-hostmaster.google.com. 1 21600 3600 259200 300 ttl: 21600 type: SOA
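# Note: the only substantive change in this record-set transaction is the SOA
# serial bump from 1 to 2 (the third rdata field); the remaining SOA timers
# (refresh 21600, retry 3600, expire 259200, negative-cache TTL 300) and the
# record TTL are unchanged. Bumping the serial is how secondaries are told
# that the zone has been updated.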
<reponame>mrv96/udma_core<filename>src_files.yml<gh_stars>1-10 udma_core: incdirs: [ ./rtl, ] files: [ rtl/common/io_clk_gen.sv, rtl/common/io_event_counter.sv, rtl/common/io_generic_fifo.sv, rtl/common/io_shiftreg.sv, rtl/common/udma_apb_if.sv, rtl/common/udma_clk_div_cnt.sv, rtl/common/udma_ctrl.sv, rtl/common/udma_dc_fifo.sv, rtl/core/udma_arbiter.sv, rtl/core/udma_ch_addrgen.sv, rtl/common/io_tx_fifo.sv, rtl/common/io_tx_fifo_dc.sv, rtl/common/io_tx_fifo_mark.sv, rtl/common/udma_clkgen.sv, rtl/core/udma_tx_channels.sv, rtl/core/udma_stream_unit.sv, rtl/core/udma_rx_channels.sv, rtl/core/udma_core.sv ]
<reponame>pulp-platform/ne16 ne16: vlog_opts: [ +nowarnSVCHK, -suppress 2275, -L hwpe_stream_lib, -L hwpe_ctrl_lib, -L hci_lib, ] incdirs: [ ., ../hwpe-stream/rtl, ../hwpe-ctrl/rtl, ] files: [ rtl/ne16_package.sv, rtl/accumulator/ne16_accumulator_scm_test_wrap.sv, rtl/input_buffer/ne16_input_buffer_scm_test_wrap.sv, rtl/accumulator/ne16_accumulator_scm.sv, rtl/accumulator/ne16_accumulator_normquant.sv, rtl/accumulator/ne16_normquant.sv, rtl/accumulator/ne16_normquant_shifter.sv, rtl/accumulator/ne16_normquant_bias.sv, rtl/accumulator/ne16_normquant_multiplier.sv, rtl/input_buffer/ne16_input_buffer_scm.sv, rtl/input_buffer/ne16_input_buffer.sv, rtl/array/ne16_scale.sv, rtl/array/ne16_binconv_block.sv, rtl/array/ne16_binconv_column.sv, rtl/array/ne16_binconv_array.sv, rtl/ctrl/ne16_ctrl_fsm.sv, rtl/ctrl/ne16_ctrl.sv, rtl/ne16_engine.sv, rtl/ne16_streamer.sv, rtl/ne16_top.sv, rtl/ne16_top_wrap.sv, ]
name: Benchmark

on:
  push:
    branches:
      - master

jobs:
  build:
    strategy:
      matrix:
        os: [ubuntu-latest]
        rust: [nightly]
    runs-on: ${{ matrix.os }}
    steps:
      - name: Setup Rust
        uses: hecrj/setup-rust-action@v1
        with:
          rust-version: ${{ matrix.rust }}
      - name: Checkout
        uses: actions/checkout@v1
      - name: Run benchmark
        run: cargo +nightly bench | tee output.txt
      - name: Fetch gh-pages to avoid conflicts
        run: git fetch 'https://dalance:${{ secrets.GITHUB_TOKEN }}@github.com/dalance/sv-parser.git' gh-pages:gh-pages
      - name: Store benchmark result
        uses: rhysd/github-action-benchmark@v1
        with:
          name: Rust Benchmark
          tool: 'cargo'
          output-file-path: output.txt
      - name: Push benchmark result
        run: git push 'https://x-access-token:${{ secrets.PERSONAL_TOKEN }}@github.com/dalance/sv-parser.git' gh-pages:gh-pages
<gh_stars>1-10 --- algorithm: class: MuLambda comma_or_plus: plus population_size: 1000 lambda_size: 200 elite_size: 10 probabilities: crossover: 0.9 mutation: 0.01 injection: 0.9 termination: max_steps: 2000 on_individual: :stopping_condition init: method: ramped # grow or full or ramped sensible_depth: 7 inject: method: grow # grow or full or random sensible_depth: 7 grammar: class: Abnf::File filename: sample/ant_trail_tcc/grammar.abnf mapper: class: DepthLocus selection: class: RankSampling selection_rank: #optional class: Ranking elite_rank: class: Ranking crossover: class: CrossoverRipple margin: 2 #1 step: 2 mutation: class: MutationRipple store: class: Store filename: ./ant_mu_plus_lambda_tcc.store report: class: PopulationReport individual: class: PipedIndividual shorten_chromozome: true _pareto: :fitness: maximize _pipe_output: - :fitness: to_i _thresholds: :fitness: 89 _mark_phenotype: "\nMARKER\n" evaluator: class: WorkPipes commands: - 'ruby sample/ant_trail_tcc/ant_pipe.rb ONE sample/ant_trail_tcc/ant_evaluate.c' - 'ruby sample/ant_trail_tcc/ant_pipe.rb TWO sample/ant_trail_tcc/ant_evaluate.c' # for the remote pipe configuration, use: # # - 'ssh user@host "ruby /full/path/to/geret/sample/ant_trail_tcc/ant_pipe.rb ID sample/ant_trail_tcc/ant_evaluate.c"' # # note the ssh connection must use public/private key pair (no password) for authentication. # (see eg. http://www.petefreitag.com/item/532.cfm # or http://www.debian-administration.org/article/SSH_with_authentication_key_instead_of_password )
image: gitlab-registry.grahamedgecombe.com/gpe/icestorm-docker:latest stages: - build build: stage: build script: - make YS_ICE40=/usr/share/yosys/ice40/cells_sim.v syntax time stat
<filename>.github/workflows/release.yml name: release on: push: tags: - 'v*' jobs: build: runs-on: ${{ matrix.os }} strategy: matrix: os: - ubuntu-16.04 - ubuntu-18.04 - ubuntu-20.04 - windows-latest - macos-latest steps: - name: Checkout vparse uses: actions/checkout@v2 - uses: jiro4989/setup-nim-action@v1.1.4 with: nim-version: 'stable' - name: Build Python3 library for Ubuntu if: ${{ matrix.os == 'ubuntu-20.04' || matrix.os == 'ubuntu-18.04' || matrix.os == 'ubuntu-16.04'}} run: | nimble install -d -y nimble build_pylib source /etc/os-release PLATFORM_INFO="${ID}-${VERSION_ID}-$(uname -i)" ARCHIVE_FILENAME="vparse-$(cat src/vparsepkg/private/VERSION)-pylib-${PLATFORM_INFO}" tar -czf "${ARCHIVE_FILENAME}.tar.gz" vparse.so README.md LICENSE CHANGELOG.md THIRD_PARTY_LICENSES.md md5sum "${ARCHIVE_FILENAME}.tar.gz" > ${ARCHIVE_FILENAME}.tar.gz.md5 - name: Build Python3 library for macOS if: ${{ matrix.os == 'macos-latest' }} run: | nimble install -d -y nimble build_pylib ARCHIVE_FILENAME="vparse-$(cat src/vparsepkg/private/VERSION)-pylib-macos" tar -czf "${ARCHIVE_FILENAME}.tar.gz" vparse.so README.md LICENSE CHANGELOG.md THIRD_PARTY_LICENSES.md md5 "${ARCHIVE_FILENAME}.tar.gz" > ${ARCHIVE_FILENAME}.tar.gz.md5 - name: Build Python3 library for Windows if: ${{ matrix.os == 'windows-latest' }} run: | nimble install -d -y nimble build_pylib $ARCHIVE_FILENAME="vparse-$(cat src/vparsepkg/private/VERSION)-pylib-win_x86_64" 7z a -tzip "${ARCHIVE_FILENAME}.zip" vparse.pyd README.md LICENSE CHANGELOG.md THIRD_PARTY_LICENSES.md [string]$MD5 = (get-filehash -Algorithm MD5 "${ARCHIVE_FILENAME}.zip").Hash echo $MD5 > "${ARCHIVE_FILENAME}.zip.md5" - name: Upload artifacts uses: softprops/action-gh-release@v1 env: GITHUB_TOKEN: ${{ secrets.TOKEN }} with: draft: true files: | *.tar.gz *.tar.gz.md5 *.zip *.zip.md5
<filename>manifests/sample/harbor/robot_account.yaml apiVersion: harbor.f110.dev/v1alpha1 kind: HarborRobotAccount metadata: name: test spec: project_namespace: default project_name: test secret_name: docker-config-test
quicklogic: files: [ eFPGA_wrapper.sv, top.sv, A2_design.sv, top1_wrapper.sv, ./a2_math_unit.sv, ./ql_math_unit/rtl/DW02_mac.sv, ./ql_math_unit/rtl/mac_16bit.v, ./ql_math_unit/rtl/mac_32bit.v, ./ql_math_unit/rtl/mac_8bit.v, ./ql_math_unit/rtl/mac_4bit.v, ./ql_math_unit/rtl/mac_array.v, ./ql_math_unit/rtl/math_block.v, ./ql_math_unit/rtl/tpram_wrap.v, ./ql_math_unit/rtl/sram512x64.v, ./ql_fcb/rtl/baud_generator.v, ./ql_fcb/rtl/fcbaps.sv, ./ql_fcb/rtl/fcbclp.sv, ./ql_fcb/rtl/fcbfsr.sv, ./ql_fcb/rtl/fcbmic.sv, ./ql_fcb/rtl/fcbpif.sv, ./ql_fcb/rtl/fcbpmu.sv, ./ql_fcb/rtl/fcbrfu.sv, ./ql_fcb/rtl/fcbrfuwff.sv, ./ql_fcb/rtl/fcbrwf.sv, ./ql_fcb/rtl/fcbsmc.sv, ./ql_fcb/rtl/fcbssc.sv, ./ql_fcb/rtl/fcb.sv, ./ql_fcb/rtl/ql_generic_gates.sv, ./ql_fcb/rtl/registers.v, ./ql_fcb/rtl/serializer_deserializer.v, ./ql_fcb/rtl/spi_master_top.v, ./ql_fcb/rtl/SPI_slave.sv, ./ql_fcb/rtl/qf_aff2.sv, ./ql_fcb/rtl/qf_dff.sv, ./ql_fcb/rtl/qf_rfm_f.sv, ./ql_fcb/rtl/qf_rfm.sv, ./ql_fcb/rtl/qf_rhw.sv, ./ql_fcb/rtl/qf_rwhwsc.sv, ./ql_fcb/rtl/qf_rw.sv, ./ql_fcb/rtl/qf_sff.sv, ]
name: run-test

on: [push]

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v1
      with:
        submodules: true
    - name: checkout prebuilt binary
      run: ./script/checkout_prebuilt_binary_for_ci.sh ${{ secrets.RAFI_PREBUILT_BINARY_USER }} ${{ secrets.RAFI_PREBUILT_BINARY_ACCESS_TOKEN }}
    - name: check revision
      run: (cd third_party/rafi-prebuilt-binary; git log -1)
    # Workaround for a bug in GitHub Actions
    #   - https://github.com/actions/virtual-environments/issues/83
    #   - https://github.com/actions/virtual-environments/issues/110
    - name: install additional packages
      run: >
        sudo apt-get install -y ninja-build libboost-filesystem1.65.1 libboost-program-options1.65.1 libboost1.65-dev &&
        sudo ln -s /usr/lib/x86_64-linux-gnu/libboost_filesystem.so.1.65.1 /usr/lib/x86_64-linux-gnu/libboost_filesystem.so &&
        sudo ln -s /usr/lib/x86_64-linux-gnu/libboost_program_options.so.1.65.1 /usr/lib/x86_64-linux-gnu/libboost_program_options.so
    - name: build_gtest
      run: ./script/build_gtest.sh
    - name: build_release
      run: ./script/build_release.sh
    - name: run rafi-emu unit test
      run: ./script/run_emu_test.sh Release
    - name: run riscv-tests on rafi-emu
      run: ./script/run_emu_riscv_tests.sh
    - name: run linux on emu
      run: ./script/run_emu_linux.sh
    - name: run rafi-sim unit test (SystemVerilog verification by verilator)
      run: ./script/run_vtest.sh Release
    - name: run riscv-tests on rafi-sim
      run: ./script/run_sim_riscv_tests.sh
# Adapted from Garnet name: constraints outputs: - constraints.tcl parameters: clock_period: 1.0 design_name: undefined
# Copyright lowRISC contributors. # Licensed under the Apache License, Version 2.0, see LICENSE for details. # SPDX-License-Identifier: Apache-2.0 gen-weights: # Generators that can continue the program Branch: 0.1 CallStackRW: 0.1 EdgeLoadStore: 0.1 Jump: 0.1 Loop: 0.1 LoopDupEnd: 0.01 SmallVal: 0.2 StraightLineInsn: 1.0 KnownWDR: 0.05 # Generators that end the program ECall: 1 BadAtEnd: 1 BadBNMovr: 1 BadBranch: 1 BadCallStackRW: 1 BadDeepLoop: 1 BadLoadStore: 1 BadInsn: 1 BadGiantLoop: 1 BadZeroLoop: 1
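# Note (assumption from the key name "gen-weights" and the relative scale of
# the values): these are relative selection weights, not absolute
# probabilities -- within the first group, StraightLineInsn at 1.0 is chosen
# ten times as often as Branch at 0.1, and the program-ending generators are
# equally weighted against one another.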
<reponame>CSEYJ/Flightplan description: >- SIGCOMM submission -- plotting pings to get overhead of table offload experiment: offload repositories: P4Boosters: 52e70cc6 files: analysis.ipynb: plot_pings.ipynb baselinePingLog.txt: baselinePingLog.txt oneHopPingLog.txt: oneHopPingLog.txt documentation.md: offload.md
<reponame>PrincetonUniversity/prga context: ../../../../fpga/frame/grady18_N4_rom2K_8x8/ctx.pkl compiler: vcs app: name: romtest sources: - ../../src/romtest.v includes: - ../../src/include constraints: io: io.partial tests: romtest_test: sources: - ../../src/romtest_test.v
<reponame>shady831213/jarvism<gh_stars>1-10 env: simulator: type: "vcs" common_compile_option: &common_compile >- -sverilog -ntb_opts uvm-1.2 common_sim_option: &common_sim >- +UVM_VERBOSITY=UVM_LOW +UVM_CONFIG_DB_TRACE builds: build1: pre_compile_action: - echo "pre_compile_build1" compile_option: - -debug_access+pp - *common_compile post_compile_action: - echo "post_compile_build1" sim_option: - *common_sim groups: group1: build: build1 args: - -vh - -repeat 1 tests: - test1: args: - -repeat 10,-wave,-vh UVM_MIDIUM - test2: args: - -seed 1 - -wave fsdb
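# Note: "&common_compile" and "&common_sim" define YAML anchors, and the
# "*common_compile" / "*common_sim" aliases splice those shared option strings
# into each build's compile_option and sim_option lists, so the common VCS/UVM
# flags are written once and reused everywhere.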
<filename>.codeclimate.yml engines: coffeelint: enabled: true eslint: enabled: true ratings: paths: - "**.coffee" - "**.js" exclude_paths:
package: name: pulp authors: - "<NAME> <<EMAIL>>" - "<NAME> <<EMAIL>>" - "<NAME> <<EMAIL>>" - "<NAME> <<EMAIL>>" - "<NAME> <<EMAIL>>" - "<NAME> <<EMAIL>>" - "<NAME> <<EMAIL>>" dependencies: common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.22.1 } tech_cells_generic: { git: "https://github.com/pulp-platform/tech_cells_generic.git", version: 0.2.5 } jtag_pulp: { git: "https://github.com/pulp-platform/jtag_pulp.git", rev: "v0.1" } pulp_soc: { git: "https://github.com/pulp-platform/pulp_soc.git", version: 3.0.0 } pulp_cluster: { git: "https://github.com/pulp-platform/pulp_cluster.git", rev: "e71ff80ab1e661a1ba4df78eceae42caeec074ad" } tbtools: { git: "https://github.com/pulp-platform/tbtools.git", version: 0.2.1 } export_include_dirs: - rtl/includes sources: # Source files grouped in levels. Files in level 0 have no dependencies on files in this # package. Files in level 1 only depend on files in level 0, files in level 2 on files in # levels 1 and 0, etc. Files within a level are ordered alphabetically. # Level 0 # Open models - target: any(test,simulation) files: - rtl/vip/spi_master_padframe.sv - rtl/vip/uart_tb_rx.sv - rtl/vip/camera/cam_vip.sv # S25FS256_model (SPI Flash) - target: all(any(test,simulation), flash_vip) defines: SPEEDSIM: ~ files: - rtl/vip/spi_flash/S25fs256s/model/s25fs256s.v # 24FC1025 model (I2C flash) - target: all(any(test,simulation), i2c_vip) defines: SPEEDSIM: ~ files: - rtl/vip/i2c_eeprom/24FC1025.v # hyper models - target: all(any(test,simulation), hyper_vip) defines: SPEEDSIM: ~ files: - rtl/vip/hyperflash_model/s26ks512s.v - rtl/vip/hyperram_model/s27ks0641.v # psram model - target: all(any(test, simulation), psram_vip) defines: SPEEDSIM: ~ files: - rtl/vip/psram_model/psram_fake.v # Workaround for unsupported *.vp_modelsim filetype in bender # i2s model - target: all(any(test,simulation), i2s_vip) defines: SPEEDSIM: ~ files: - rtl/vip/i2s/i2c_if.v - rtl/vip/i2s/i2s_vip_channel.sv - rtl/vip/i2s/i2s_vip.sv - rtl/pulp/cluster_domain.sv - rtl/pulp/jtag_tap_top.sv - rtl/pulp/pad_control.sv - rtl/pulp/pad_frame.sv - rtl/pulp/rtc_clock.sv - rtl/pulp/rtc_date.sv - rtl/pulp/soc_domain.sv # Level 1 - rtl/pulp/safe_domain.sv # Level 2 - rtl/pulp/pulp.sv # TB - target: any(test, simulation) files: # TB Level 0 - rtl/tb/riscv_pkg.sv - rtl/tb/SimDTM.sv - rtl/tb/SimJTAG.sv - rtl/tb/tb_clk_gen.sv - rtl/tb/tb_fs_handler.sv # TB Level 1 - rtl/tb/jtag_pkg.sv # TB Level 2 - rtl/tb/pulp_tap_pkg.sv # TB Level 3 - rtl/tb/tb_pulp.sv
os: linux
dist: bionic

language: python
python:
  - "3.7"

cache:
  pip: true
  ccache: true
  directories:
    - $HOME/.ccache
    - $HOME/.stack
    - $HOME/.conan

env:
  global:
    - MAKEFLAGS="-j2"

jobs:
  include:
    - stage: "Tests"
      name: "Python PEP8 checks"
      install:
        - pip install -r conf/requirements.txt
      script:
        - "make format"
        - "test $(git status --porcelain | wc -l) -eq 0 || { git diff; false; }"

    - name: "Generate report.html"
      install:
        - sudo apt-get install gperf -y
        - sudo apt-get install libfl-dev -y
        - sudo apt-get install npm
        - sudo apt-get install gcc-8 -y
        - sudo apt-get install g++-8 -y
        - sudo apt-get install yosys -y
        - sudo apt-get install iverilog -y
        - sudo apt-get install verilator -y
        - sudo ln -sf /usr/bin/gcc-8 /usr/bin/gcc
        - sudo ln -sf /usr/bin/g++-8 /usr/bin/g++
        - sudo ln -sf /usr/bin/ccache /usr/local/bin/gcc
        - sudo ln -sf /usr/bin/ccache /usr/local/bin/g++
        - sudo ln -sf /usr/bin/ccache /usr/local/bin/gcc-8
        - sudo ln -sf /usr/bin/ccache /usr/local/bin/g++-8
        - pip install -r conf/requirements.txt
        - mkdir -p ~/.local/bin
        - export PATH=$HOME/.local/bin:$PATH
        - travis_retry curl -L https://get.haskellstack.org/stable/linux-x86_64.tar.gz | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack'
        - curl https://sh.rustup.rs -sSf | sh -s -- -y
        - sudo cp .travis/disco.list /etc/apt/sources.list.d/disco.list
        - sudo apt update
        - sudo apt install libantlr4-runtime-dev antlr4 -y
        - pip install virtualenv
        - pip install -r third_party/tools/hdlConvertor/requirements.txt
        - source $HOME/.cargo/env
      script:
        - "make slang"
        - "make odin"
        - "make zachjs-sv2v"
        - "make tree-sitter-verilog"
        - "make sv-parser"
        - "make hdlConvertor"
        - "make generate-tests"
        - "make report USE_ALL_RUNNERS=1"
        - "touch out/report/.nojekyll"
      deploy:
        provider: pages
        github_token: $GH_TOKEN
        skip_cleanup: true
        keep_history: true
        local_dir: out/report
        verbose: true
        on:
          branch: master
<gh_stars>10-100 --- name: dual-dds board: boards/red-pitaya cores: - fpga/cores/redp_adc_v1_0 - fpga/cores/redp_dac_v1_0 - fpga/cores/axi_ctl_register_v1_0 - fpga/cores/axi_sts_register_v1_0 - fpga/cores/dna_reader_v1_0 - fpga/cores/axis_variable_v1_0 memory: - name: control offset: '0x40000000' range: 4K - name: status offset: '0x50000000' range: 4K control_registers: - led - phase_incr[4] status_registers: - adc[n_adc] parameters: fclk0: 200000000 adc_clk: 125000000 dac_width: 14 adc_width: 14 n_adc: 2 xdc: - ./xdc/ports.xdc - ./xdc/clocks.xdc drivers: - server/drivers/common.hpp - ./dual_dds.hpp web: - web/koheron.ts - ./web/dual_dds.ts - ./web/app.ts - ./web/control.ts - ./web/index.html - web/main.css - web/dds-frequency/dds-frequency.html - web/dds-frequency/dds-frequency.ts
name: CI on: push: branches: [ master ] pull_request: branches: [ master ] jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: get container run: docker pull jeffbush001/nyuzi-build - name: build run: docker run -v $(pwd):/root/NyuziProcessor -w /root/NyuziProcessor jeffbush001/nyuzi-build /bin/bash -c scripts/run_ci_tests.sh
<filename>src_files.yml hci: vlog_opts: [ +nowarnSVCHK, -suppress 2275, -L hwpe_stream_lib, -L cluster_interconnect_lib, ] jg_inclibs: [ hwpe-stream, ] files: [ rtl/common/hci_package.sv, rtl/common/hci_interfaces.sv, rtl/interco/hci_log_interconnect.sv, rtl/interco/hci_log_interconnect_l2.sv, rtl/interco/hci_new_log_interconnect.sv, rtl/hci_interconnect.sv, rtl/interco/hci_hwpe_interconnect.sv, rtl/interco/hci_shallow_interconnect.sv, rtl/core/hci_core_fifo.sv, rtl/core/hci_core_assign.sv, rtl/core/hci_core_memmap_filter.sv, rtl/core/hci_core_memmap_demux_interl.sv, rtl/core/hci_core_sink.sv, rtl/core/hci_core_source.sv, rtl/core/hci_core_mux_static.sv, rtl/core/hci_core_mux_dynamic.sv, rtl/core/hci_core_load_store_mixer.sv, rtl/core/hci_core_r_valid_filter.sv, rtl/core/hci_core_cmd_queue.sv, rtl/core/hci_core_split.sv, rtl/mem/hci_mem_assign.sv, ]
<reponame>yufat48/pulpissimo package: name: axi_slice_dc authors: ["<NAME> <<EMAIL>>"] dependencies: axi: { git: "https://github.com/pulp-platform/axi.git", version: 0.4.3 } axi_slice: { git: "https://github.com/pulp-platform/axi_slice.git", version: 1.1.3 } sources: - src/axi_slice_dc_master.sv - src/axi_slice_dc_slave.sv - src/dc_data_buffer.sv - src/dc_full_detector.v - src/dc_synchronizer.v - src/dc_token_ring_fifo_din.v - src/dc_token_ring_fifo_dout.v - src/dc_token_ring.v - src/axi_slice_dc_master_wrap.sv - src/axi_slice_dc_slave_wrap.sv - src/axi_cdc.sv
# This is an example of how the ENC IP is used in our YAML
ENC:
  IP: __ENC
  CMD: ENC
  pha_out:
    PIN: A34
    DIRECTION: out
  phb_out:
    PIN: A33
    DIRECTION: out
  index_out:
    PIN: CPLD_IO50
    DIRECTION: out
<gh_stars>1-10 axi_rab: files: [ rtl/axi4_ar_buffer.sv, rtl/axi4_ar_sender.sv, rtl/axi4_aw_buffer.sv, rtl/axi4_aw_sender.sv, rtl/axi4_b_buffer.sv, rtl/axi4_b_sender.sv, rtl/axi4_r_buffer.sv, rtl/axi4_r_sender.sv, rtl/axi4_w_buffer.sv, rtl/axi4_w_sender.sv, rtl/axi_buffer_rab_bram.sv, rtl/axi_buffer_rab.sv, rtl/axi_rab_cfg.sv, rtl/axi_rab_top.sv, rtl/check_ram.sv, rtl/fsm.sv, rtl/l2_tlb.sv, rtl/rab_core.sv, rtl/rab_slice.sv, rtl/ram_tp_write_first.sv, rtl/ram_tp_no_change.sv, rtl/slice_top.sv, ]
configMapGenerator: - name: xss-bot-config files: - pow generatorOptions: disableNameSuffixHash: true labels: type: generated annotations: note: generated
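For context, a hedged sketch of the ConfigMap this generator would emit: with disableNameSuffixHash: true the name stays exactly xss-bot-config instead of gaining a content-hash suffix, and the generatorOptions labels/annotations are applied to the generated object (the data value is whatever the local pow file holds):

apiVersion: v1
kind: ConfigMap
metadata:
  name: xss-bot-config
  labels:
    type: generated
  annotations:
    note: generated
data:
  pow: |
    # <contents of the local "pow" file>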
# This will run on Travis' sudo-enabled VM infrastructure
sudo: true

# Setup for Ubuntu Bionic (18.04)
dist: bionic

language:
  - python

python:
  - 3.6

git:
  clone: false

# Environment variables
env:
  global:
    - GH_REPO_URL: github.com/slaclab/lcls2-timetool
    - PACKAGE_DIR: $HOME/packages
    - MINICONDA_DIR: $PACKAGE_DIR/miniconda
    - DOWNLOAD_DIR: ${HOME}/download
    - DOWNLOAD_URL: ${GH_REPO_URL}/releases/download/${TRAVIS_TAG}/rogue_${TRAVIS_TAG}.zip

# Install dependencies
addons:
  apt:
    packages:
      - python3

stages:
  - name: deploy_tag
    if: tag IS present

# Generate and deploy documentation
jobs:
  include:
    - &deploy-conda-stage       # Conda for linux
      stage: deploy_tag
      name: "Deploy Conda"
      before_install:
        # Prepare folders
        - mkdir -p $MINICONDA_DIR
        - mkdir -p $DOWNLOAD_DIR
      install:
        # Install Anaconda for the right architecture (linux or osx)
        - cd $MINICONDA_DIR
        - wget -O miniconda.sh https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh;
        - bash miniconda.sh -b -p $HOME/miniconda
        - export PATH="$HOME/miniconda/bin:$PATH"
        - hash -r
        - conda config --set always_yes yes
        - conda install conda-build anaconda-client conda-verify
        - conda update -q conda conda-build
        # Download and unzip tag zipfile
        - cd $DOWNLOAD_DIR
        - wget -O file.zip $DOWNLOAD_URL
        - unzip file.zip
      script:
        # Build conda package
        - travis_wait 60 conda build --debug conda-recipe --output-folder bld-dir -c tidair-tag -c tidair-packages -c conda-forge
      after_success:
        # Upload conda package
        - anaconda -t $CONDA_UPLOAD_TOKEN_TAG upload bld-dir/`echo $TRAVIS_OS_NAME`-64/*.tar.bz2
<gh_stars>10-100 # Copyright 2021 OpenHW Group # Solderpad Hardware License, Version 2.1, see LICENSE.md for details. # SPDX-License-Identifier: Apache-2.0 WITH SHL-2.1 common_cells: path: vendor/pulp_platform_common_cells axi: path: vendor/pulp_platform_axi fpnew: path: vendor/pulp_platform_fpnew cv32e40p: path: vendor/openhwgroup_cv32e40p register_interface: path: vendor/pulp_platform_register_interface riscv-dbg: path: vendor/pulp_platform_riscv_dbg tech_cells_generic: path: vendor/pulp_platform_tech_cells_generic efpga: path: efpga # TODO(timsaxe): Add source files to `core-v-mcu` # ql_fcb: # path: ql_fcb # ql_math_unit: # path: ql_math_unit L2_tcdm_hybrid_interco: path: L2_tcdm_hybrid_interco tcdm_interconnect: path: tcdm_interconnect apb2per: path: apb2per apb_adv_timer: path: apb_adv_timer apb_fll_if: path: apb_fll_if apb_gpio: path: apb_gpio apb_node: path: apb_node apb2apbcomp: path: apb2apbcomp apb_interrupt_cntrl: path: apb_interrupt_cntrl apb_timer_unit: path: apb_timer_unit axi_slice: path: axi_slice # TODO(zarubaf,timsaxe): Make Ibex and other upcoming cores configurable here. #ibex: # path: #ibex generic_FLL: path: generic_FLL udma/udma_core: path: udma/udma_core udma/udma_uart: path: udma/udma_uart udma/udma_i2c: path: udma/udma_i2c udma/udma_i2s: path: udma/udma_i2s udma/udma_qspi: path: udma/udma_qspi udma/udma_sdio: path: udma/udma_sdio udma/udma_camera: path: udma/udma_camera udma/udma_filter: path: udma/udma_filter udma/udma_external_per: path: udma/udma_external_per logint_dc_fifo_xbar: path: logint_dc_fifo_xbar core-v-mcu: path: core-v-mcu tb: path: tb vip: path: vip
name: Sky130hd_temp-sense-generator on: push: branches: - main pull_request: workflow_dispatch: jobs: sky130hd_temp: runs-on: ubuntu-latest steps: - name: Checkout repo uses: actions/checkout@v2 - name: Use PDK data from openfasoc_ci branch run: | mkdir -p /home/runner/testing && cd /home/runner/testing git clone https://github.com/saicharan0112/OpenFASOC.git cd OpenFASOC git checkout openfasoc_ci - name: Make Sky130HD Temp env: PDK_ROOT: /home/runner/testing/OpenFASOC/pdk_test IMAGE_NAME: efabless/openlane:2021.12.22_01.51.18 run: | cd $GITHUB_WORKSPACE touch file.log docker run --rm\ -e GITHUB_ACTIONS=1\ -v $PDK_ROOT:$PDK_ROOT\ -e PDK_ROOT=$PDK_ROOT\ -v $PWD:$PWD\ -w $PWD\ $IMAGE_NAME\ bash -c "\ yum install -y time &&\ cd ./openfasoc/generators/temp-sense-gen &&\ make sky130hd_temp\ ">> file.log if grep "\[ERROR\]" file.log; then exit 1; else exit 0; fi
<filename>docker-compose.yml<gh_stars>0 version: '3' services: xtradb-percona: image: percona:8.0 container_name: xtradb-percona environment: MYSQL_ROOT_PASSWORD: <PASSWORD> MYSQL_DATABASE: fletcherfiltering MYSQL_USER: fletcherfiltering MYSQL_PASSWORD: <PASSWORD> ports: - 3306:3306 volumes: # create volumes for use - mysql-data:/var/lib/mysql # bind mount my local my.cnf # - $PWD/my.cnf:/etc/my.cnf # SQL scripts #- ./sql-data/ticketassist.sql:/docker-entrypoint-initdb.d/ticketassist.sql command: # Workaround for no my.cnf in image - '--user=mysql' volumes: mysql-data:
<reponame>ikwzm/merge_sorter --- input_file : 02_word_package.akd output_file : ../02_word_package.md image_url : "Fig.1 ワードの構成" : "image/02_word_package_1.jpg" "Fig.2 BITS Field" : "image/02_word_package_2.jpg" "Fig.3 DATA_BITS DATA_LO DATA_HI Field" : "image/02_word_package_3.jpg" "Fig.4 DATA_COMP_HI DATA_COMP_LO Field" : "image/02_word_package_4.jpg" "Fig.5 ATRB_BITS ATRB_LO ATRB_HI Field" : "image/02_word_package_5.jpg" "Fig.6 ATRB_NONE_POS Field" : "image/02_word_package_6.jpg" "Fig.7 ATRB_PRIORITY_POS Field" : "image/02_word_package_7.jpg" "Fig.8 ATRB_POSTPEND_POS Field" : "image/02_word_package_8.jpg" link_list : - id : "「はじめに」" title: "「VHDL で書くマージソーター(はじめに)」" url : "./01_introduction.md" - id : "「ワードの定義」" title: "「VHDL で書くマージソーター(ワードの定義)」" url : "./02_word_package.md" - id : "「ワード比較器」" title: "「VHDL で書くマージソーター(ワード比較器)」" url : "./03_word_compare.md" - id : "「ソーティングネットワーク」" title: "「VHDL で書くマージソーター(ソーティングネットワーク)」" url : "./04_sorting_network.md" - id : "「バイトニックマージソート」" title: "「VHDL で書くマージソーター(バイトニックマージソート)」" url : "./05_bitonic_sorter.md" - id : "「バッチャー奇偶マージソート」" title: "「VHDL で書くマージソーター(バッチャー奇偶マージソート)」" url : "./06_oddeven_sorter.md" - id : "「シングルワード マージソート ノード」" title: "「VHDL で書くマージソーター(シングルワード マージソート ノード)」" url : "./07_merge_sort_node_single.md" - id : "「マルチワード マージソート ノード」" title: "「VHDL で書くマージソーター(マルチワード マージソート ノード)」" url : "./08_merge_sort_node_multi.md" - id : "「マージソート ツリー」" title: "「VHDL で書くマージソーター(マージソート ツリー)」" url : "./09_merge_sort_tree.md" - id : "「端数ワード処理」" title: "「VHDL で書くマージソーター(端数ワード処理)」" url : "./10_merge_sort_core_1.md" - id : "「ストリーム入力」" title: "「VHDL で書くマージソーター(ストリーム入力)」" url : "./11_merge_sort_core_2.md" - id : "「ストリームフィードバック」" title: "「VHDL で書くマージソーター(ストリームフィードバック)」" url : "./12_merge_sort_core_3.md" - id : "「ArgSort IP」" title: "「VHDL で書くマージソーター(ArgSort IP)」" url : "./13_argsort.md" - id : "「ArgSort-Ultra96」" title: "「VHDL で書くマージソーター(ArgSort-Ultra96)」" url : "https://github.com/ikwzm/ArgSort-Ultra96/blob/1.2.1/doc/ja/argsort-ultra96.md" - id : "「ArgSort-Kv260」" title: "「VHDL で書くマージソーター(ArgSort-Kv260)」" url : "https://github.com/ikwzm/ArgSort-Kv260/blob/1.2.1/doc/ja/argsort-Kv260.md" ---
# Copyright Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - iss: spike path_var: SPIKE_PATH cmd: > <path_var>/spike --log-commits --isa=<variant> -l <elf> - iss: ovpsim path_var: OVPSIM_PATH cmd: > <path_var>/riscvOVPsim.exe --variant <variant> --override riscvOVPsim/cpu/PMP_registers=0 --override riscvOVPsim/cpu/simulateexceptions=T --trace --tracechange --traceshowicount --program <elf> --finishafter 1000000 - iss: sail path_var: SAIL_RISCV cmd: > <path_var> <elf>
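Every entry shares the same three keys (iss, path_var, cmd), so wiring in another ISS is one more block following the same schema; a sketch under that assumption (the simulator name, path variable, and binary are hypothetical):

# Hedged example only -- iss name, path variable, and binary are placeholders.
- iss: my_iss
  path_var: MY_ISS_PATH
  cmd: >
    <path_var>/my_iss --isa=<variant> <elf>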
<reponame>pstegegn/ariane language: cpp # run on new infrastructure sudo: false cache: apt: true directories: $RISCV $VERILATOR_ROOT # required packages to install addons: apt: sources: - ubuntu-toolchain-r-test packages: - gcc-7 - g++-7 - gperf - autoconf - automake - autotools-dev - libmpc-dev - libmpfr-dev - libgmp-dev - gawk - build-essential - bison - flex - texinfo - python-pexpect - libusb-1.0-0-dev - default-jdk env: global: - RISCV="/home/travis/riscv_install" - VERILATOR_ROOT="/home/travis/verilator-3.924/" branches: only: - master - ariane_next before_install: - export CXX=g++-7 CC=gcc-7 # setup dependent paths - export PATH=$RISCV/bin:$VERILATOR_ROOT/bin:$PATH - export LIBRARY_PATH=$RISCV/lib - export LD_LIBRARY_PATH=$RISCV/lib - export C_INCLUDE_PATH=$RISCV/include:$VERILATOR_ROOT/include - export CPLUS_INCLUDE_PATH=$RISCV/include:$VERILATOR_ROOT/include # number of parallel jobs to use for make commands and simulation - export NUM_JOBS=4 - ci/make-tmp.sh - git submodule update --init --recursive stages: - compile1 - compile2 - test jobs: include: - stage: compile1 name: build gcc script: - ci/build-riscv-gcc.sh - stage: compile2 name: build tools script: - ci/install-fesvr.sh - ci/install-verilator.sh - ci/install-dtc.sh - ci/install-spike.sh - stage: test name: run riscv benchmarks script: - ci/build-riscv-tests.sh - make -j${NUM_JOBS} run-benchmarks-verilator # rv64ui-p-* tests - stage: test name: run asm tests1 script: - ci/build-riscv-tests.sh - make -j${NUM_JOBS} run-asm-tests1-verilator # rv64ui-v-* tests - stage: test name: run asm tests2 script: - ci/build-riscv-tests.sh - make -j${NUM_JOBS} run-asm-tests2-verilator - stage: test name: run torture script: - ci/get-torture.sh - make clean - make torture-gen - make torture-rtest-verilator # extra time during long builds install: travis_wait
package: name: rv_plic authors: - "low risc" - "<NAME> <<EMAIL>>" - "<NAME> <<EMAIL>>" sources: - rtl/reg_intf_pkg.sv - rtl/rv_plic_target.sv - rtl/rv_plic_gateway.sv - rtl/plic_regmap.sv - rtl/plic_top.sv
--- input_file : ../akane/01_introduction.akd output_file : 01_introduction.md link_list : - id : "はじめに" title: "「VHDL で書くマージソーター(はじめに)」@Qiita" url : "https://qiita.com/ikwzm/items/6665b2ef44d878a5b85f" - id : "ワードの定義" title: "「VHDL で書くマージソーター(ワードの定義)」@Qiita" url : "https://qiita.com/ikwzm/items/bdcd8876317b908ff492" - id : "ワード比較器" title: "「VHDL で書くマージソーター(ワード比較器)」@Qiita" url : "https://qiita.com/ikwzm/items/d5d1dd264b1670f33bd7" - id : "ソーティングネットワーク" title: "「VHDL で書くマージソーター(ソーティングネットワーク)」@Qiita" url : "https://qiita.com/ikwzm/items/a1d06e47523759c726ae" - id : "バイトニックマージソート" title: "「VHDL で書くマージソーター(バイトニックマージソート)」@Qiita" url : "https://qiita.com/ikwzm/items/366eacbf6a877994c955" - id : "バッチャー奇偶マージソート" title: "「VHDL で書くマージソーター(バッチャー奇偶マージソート)」@Qiita" url : "https://qiita.com/ikwzm/items/c21a53f21b87408a7805" - id : "シングルワード マージソート ノード" title: "「VHDL で書くマージソーター(シングルワード マージソート ノード)」@Qiita" url : "https://qiita.com/ikwzm/items/7fd7ef9ffc4d9b314fee" - id : "マルチワード マージソート ノード" title: "「VHDL で書くマージソーター(マルチワード マージソート ノード)」@Qiita" url : "https://qiita.com/ikwzm/items/ed96b7a44b83bcee4ba5" - id : "マージソート ツリー" title: "「VHDL で書くマージソーター(マージソート ツリー)」@Qiita" url : "https://qiita.com/ikwzm/items/1f76ae5cda95aaf92501" - id : "端数ワード処理" title: "「VHDL で書くマージソーター(端数ワード処理)」@Qiita" url : "https://qiita.com/ikwzm/items/6b15340f1e05ef03f8d0" - id : "ストリーム入力" title: "「VHDL で書くマージソーター(ストリーム入力)」@Qiita" url : "https://qiita.com/ikwzm/items/56e22511021a082a2ccd" - id : "ストリームフィードバック" title: "「VHDL で書くマージソーター(ストリームフィードバック)」@Qiita" url : "https://qiita.com/ikwzm/items/e8c59c0ec92956c9355f" - id : "ArgSort" title: "「VHDL で書くマージソーター(ArgSort IP)」@Qiita" url : "https://qiita.com/ikwzm/items/89fc9542492fca74c9e3" - id : "ArgSort-Ultra96" title: "「VHDL で書くマージソーター(ArgSort-Ultra96)」@Qiita" url : "https://qiita.com/ikwzm/items/d58c9b77d038e23ac792" - id : "ArgSort-Kv260" title: "「VHDL で書くマージソーター(ArgSort-Kv260)」@Qiita" url : "https://qiita.com/ikwzm/items/ec0f779534c44b35334a" - id : "ACRi" title: "アダプティブコンピューティング研究推進体(ACRi)" url : "https://www.acri.c.titech.ac.jp/wp" - id : "アダプティブコンピューティング研究推進体(ACRi)" title: "アダプティブコンピューティング研究推進体(ACRi)" url : "https://www.acri.c.titech.ac.jp/wp" - id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(1)」" title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(1)」" url : "https://www.acri.c.titech.ac.jp/wordpress/archives/132" - id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(2)」" title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(2)」" url : "https://www.acri.c.titech.ac.jp/wordpress/archives/501" - id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(3)」" title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(3)」" url : "https://www.acri.c.titech.ac.jp/wordpress/archives/2393" - id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(4)」" title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(4)」" url : "https://www.acri.c.titech.ac.jp/wordpress/archives/3888" - id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(5)」" title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(5)」" url : "https://www.acri.c.titech.ac.jp/wordpress/archives/4713" seg_level : -1 ---
<gh_stars>1-10 # Copyright (c) 2020 <NAME> <<EMAIL>> # # SPDX-License-Identifier: Apache-2.0 identifier: service name: servant SoC for iCE40 type: mcu arch: riscv32 toolchain: - zephyr ram: 32 testing: ignore_tags: - net - bluetooth
<reponame>lowRISC/axi_mem_if axi_mem_if: files: [ src/deprecated/axi_mem_if.sv, src/deprecated/axi_mem_if_wrap.sv, src/deprecated/axi_mem_if_var_latency.sv, ]
<gh_stars>1-10 arch: - block: 32 name: - arch_tlut_systolic_projection_bank16_block32 - arch_tlut_systolic_projection_bank8_block32 - arch_tlut_systolic_projection_bank4_block32 - block: 16 name: - arch_tlut_systolic_projection_bank16_block16 - arch_tlut_systolic_projection_bank8_block16 - arch_tlut_systolic_projection_bank4_block16 - block: 8 name: - arch_tlut_systolic_projection_bank16_block8 - arch_tlut_systolic_projection_bank8_block8 - arch_tlut_systolic_projection_bank4_block8 workloads: - convnet - alexnet dataflow: - dtfs
<reponame>slaclab/lsst-pwr-ctrl-core
# This will run on Travis' sudo-enabled VM infrastructure
sudo: true

# Setup for Ubuntu Bionic (18.04)
dist: bionic

language:
  - python

python:
  - 3.6

# Environment variables
env:
  global:
    - PACKAGE_DIR: $HOME/packages
    - DOXYFILE: $TRAVIS_BUILD_DIR/Doxyfile
    - MINICONDA_DIR: $PACKAGE_DIR/miniconda

# Install dependencies
addons:
  apt:
    packages:
      - python3

stages:
  - name: deploy_dev
    if: branch = pre-release AND tag IS blank AND NOT (type = pull_request)
  - name: deploy_tag
    if: tag IS present

# Generate and deploy documentation
jobs:
  include:
    - &deploy-conda-stage       # Conda for linux
      stage: deploy_dev
      name: "Deploy Conda"
      env: CONDA_TOKEN=$CONDA_UPLOAD_TOKEN_DEV
      before_install:
        # Prepare folders
        - mkdir -p $MINICONDA_DIR
        #- git pull --unshallow
        - git pull
        # on OSX rogue needs an older version of the MacOS SDK
        - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
            git clone https://github.com/phracker/MacOSX-SDKs;
            sudo mv MacOSX-SDKs/MacOSX10.9.sdk /opt/;
            export CONDA_BUILD_SYSROOT=/opt/MacOSX10.9.sdk;
            export CONDA_BUILD=1;
          fi
      install:
        # Install Anaconda for the right architecture (linux or osx)
        - cd $MINICONDA_DIR
        - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
            wget -O miniconda.sh https://repo.continuum.io/miniconda/Miniconda3-latest-MacOSX-x86_64.sh;
          else
            wget -O miniconda.sh https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh;
          fi
        - bash miniconda.sh -b -p $HOME/miniconda
        - export PATH="$HOME/miniconda/bin:$PATH"
        - hash -r
        - conda config --set always_yes yes
        - conda install conda-build anaconda-client conda-verify
        - conda update -q conda conda-build
      before_script:
        # Go back to top directory
        - cd $TRAVIS_BUILD_DIR
      script:
        # Build conda package
        - travis_wait 60 conda build --debug conda-recipe --output-folder bld-dir -c tidair-tag -c conda-forge
      after_success:
        # Upload conda package
        - anaconda -t $CONDA_TOKEN upload bld-dir/`echo $TRAVIS_OS_NAME`-64/*.tar.bz2

    #- <<: *deploy-conda-stage # Conda for MacOS
      #os: osx
      #language: ruby # osx does not support language=python

    - <<: *deploy-conda-stage # Conda for Linux
      stage: deploy_tag
      env: CONDA_TOKEN=$CONDA_UPLOAD_TOKEN_TAG

    #- <<: *deploy-conda-stage # Conda for macOS
      #stage: deploy_tag
      #os: osx
      #language: ruby # osx does not support language=python
      #env: CONDA_TOKEN=$CONDA_UPLOAD_TOKEN_TAG
package:
  name: event_unit_flex

dependencies:
  common_cells: { git: "<EMAIL>:pulp-platform/common_cells.git", version: 1.13.1 }
  # pulp_cluster: this package uses cluster_event_map, which is defined in
  # pulp_cluster, yet it is itself a dependency of pulp_cluster. Cyclic
  # dependencies seem suboptimal.

sources:
  include_dirs: ["rtl"]
  files:
    # Source files grouped in levels. Files in level 0 have no dependencies on files in this
    # package. Files in level 1 only depend on files in level 0, files in level 2 on files in
    # levels 1 and 0, etc. Files within a level are ordered alphabetically.
    # Level 0
    - rtl/event_unit_core.sv # multiple modules defined
    - rtl/hw_barrier_unit.sv
    - rtl/hw_dispatch.sv
    - rtl/hw_mutex_unit.sv # multiple modules defined
    - rtl/interc_sw_evt_trig.sv
    - rtl/periph_FIFO_id.sv
    - rtl/soc_periph_fifo.sv
    # Level 1
    - rtl/event_unit_interface_mux.sv
    # Level 2
    - rtl/event_unit_top.sv
<gh_stars>10-100 language: python python: '3.8' install: pip install tox script: PY_COLORS=1 tox -e $BUILD_NAME stages: - test - deploy matrix: include: - &docker env: - BUILD_NAME=py38-acceptance-ghdl - DOCKER_IMAGE=llvm services: docker language: minimal install: skip script: docker run --rm -tv $(pwd):/src -w /src vunit/dev:$DOCKER_IMAGE tox -e $BUILD_NAME - <<: *docker env: - BUILD_NAME=py38-vcomponents-ghdl - DOCKER_IMAGE=mcode - env: BUILD_NAME=py38-fmt script: PY_COLORS=1 tox -e $BUILD_NAME -- --check - env: BUILD_NAME="py38-lint -- --color=yes" - env: BUILD_NAME="py38-unit -- --color=yes" - env: BUILD_NAME=py38-docs before_script: git fetch --unshallow --tags # Deploy to PyPI whenever the package version has changed # When a package version has not changed a new upload will not be triggered - stage: deploy if: tag IS present script: - git fetch --unshallow --tags - python tools/release.py validate deploy: provider: pypi distributions: sdist skip_cleanup: true skip_upload_docs: true user: $PYPI_USER password: <PASSWORD> on: repo: VUnit/vunit all_branches: true