Merge remote-tracking branch 'upstream/dev' into fix/tests_parallelism_safe
auphelia committed Feb 20, 2025
2 parents ee0943a + 76eede6 commit 05a22bd
Showing 9 changed files with 48 additions and 62 deletions.
1 change: 1 addition & 0 deletions docker/Dockerfile.finn
@@ -118,6 +118,7 @@ RUN pip install pytest-metadata==1.7.0
 RUN pip install pytest-html==3.0.0
 RUN pip install pytest-html-merger==0.0.8
 RUN pip install pytest-cov==4.1.0
+RUN pip install pyyaml==6.0.1

 # extra dependencies from other FINN deps
 # installed in Docker image to make entrypoint script go faster
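
pyyaml is not used by the Dockerfile itself; presumably it supports YAML parsing in the CI and test tooling. A minimal sketch of the safe-load pattern it enables (the file name is illustrative):

```python
# Hypothetical usage sketch of the pyyaml package pinned above:
# parse a YAML config into plain Python dicts/lists. safe_load
# refuses to construct arbitrary Python objects.
import yaml

with open("test_config.yaml") as f:
    cfg = yaml.safe_load(f)

print(cfg.get("num_workers", 1))
```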
26 changes: 11 additions & 15 deletions docker/jenkins/Jenkinsfile
@@ -93,7 +93,7 @@ pipeline {
         cleanPreviousBuildFiles(env.FINN_HOST_BUILD_DIR)

         // Pass in the marker to run with pytest and the XML test results filename
-        runDockerPytestWithMarker("fpgadataflow", "${env.TEST_NAME}", "--cov --cov-report=html:coverage_fpgadataflow")
+        runDockerPytestWithMarker("fpgadataflow", "${env.TEST_NAME}", "--cov --cov-report=html:coverage_fpgadataflow -n ${env.NUM_PYTEST_WORKERS} --dist worksteal")

         // Stash the test results file(s)
         stash name: env.TEST_NAME, includes: "${env.TEST_NAME}.xml,${env.TEST_NAME}.html"
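
The new flags come from pytest-xdist: `-n` sets the number of parallel workers and `--dist worksteal` lets idle workers take pending tests from busy ones. A sketch of an equivalent invocation from Python, with the worker count hardcoded for illustration:

```python
# Sketch: marker-filtered parallel test run, mirroring the pipeline's
# pytest options. Requires pytest and pytest-xdist to be installed;
# "4" stands in for env.NUM_PYTEST_WORKERS.
import pytest

exit_code = pytest.main([
    "-m", "fpgadataflow",     # only tests marked fpgadataflow
    "-n", "4",                # pytest-xdist worker count
    "--dist", "worksteal",    # idle workers steal pending tests
    "--junitxml=results.xml",
])
```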
@@ -324,21 +324,17 @@ void runDockerPytestWithMarker(String marker, String testResultsFilename, String additionalOptions) {
     sh """./run-docker.sh python -m pytest -m ${marker} --junitxml=${testResultsFilename}.xml --html=${testResultsFilename}.html --self-contained-html ${additionalOptions}"""
 }

-def findBoardBuildFiles(String searchDir, String dirToFind) {
-    def result = sh(script: "find $searchDir -type d -name \"$dirToFind*\"", returnStdout: true).trim()
-    if (result.empty) {
-        error "Directory containing '$dirToFind' not found."
-    }
-    return result
-}
-
 void findCopyZip(String board, String findDir, String copyDir) {
-    def buildDir = findBoardBuildFiles(findDir, "hw_deployment_${board}")
-    sh "cp -r ${buildDir}/${board} ${copyDir}/"
-    dir(copyDir) {
-        sh "zip -r ${board}.zip ${board}/"
-        sh "mkdir -p ${env.ARTIFACT_DIR}/${copyDir}/"
-        sh "cp ${board}.zip ${env.ARTIFACT_DIR}/${copyDir}/"
+    sh "mkdir -p ${copyDir}"
+    try {
+        sh "cp -r ${findDir}/hw_deployment_*/${board} ${copyDir}/"
+        dir(copyDir) {
+            sh "zip -r ${board}.zip ${board}/"
+            sh "mkdir -p ${env.ARTIFACT_DIR}/${copyDir}/"
+            sh "cp ${board}.zip ${env.ARTIFACT_DIR}/${copyDir}/"
+        }
+    } catch (err) {
+        error "No ${board} hw_deployment_* build artifacts found in ${findDir}"
     }
 }
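
The rework trades the indirection of a find-based directory lookup for a direct glob copy: the shell `cp` itself now detects the missing-artifact case, and the try/catch turns that into a single clear pipeline error instead of a separate search-and-validate helper.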
5 changes: 2 additions & 3 deletions notebooks/end2end_example/bnn-pynq/cnv_end2end_example.ipynb
@@ -484,8 +484,7 @@
     "metadata": {},
     "outputs": [],
     "source": [
-    "from shutil import copy\n",
-    "from distutils.dir_util import copy_tree\n",
+    "from shutil import copy, copytree\n",
     "\n",
     "# create directory for deployment files\n",
     "deployment_dir = make_build_dir(prefix=\"pynq_deployment_\")\n",
@@ -503,7 +502,7 @@
     "\n",
     "# driver.py and python libraries\n",
     "pynq_driver_dir = model.get_metadata_prop(\"pynq_driver_dir\")\n",
-    "copy_tree(pynq_driver_dir, deployment_dir)"
+    "copytree(pynq_driver_dir, deployment_dir, dirs_exist_ok=True)"
 ]
},
{
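
`distutils` (and with it `copy_tree`) was deprecated by PEP 632 and removed from the standard library in Python 3.12; `shutil.copytree(..., dirs_exist_ok=True)`, available since Python 3.8, is the drop-in stdlib replacement. The same substitution recurs in the tfc notebook and in build_dataflow_steps.py below. A minimal sketch of the one behavioral detail that matters here:

```python
# Sketch: with dirs_exist_ok=True, copytree merges into an existing
# destination instead of raising FileExistsError, matching the old
# distutils copy_tree semantics. Paths are illustrative temp dirs.
import os
import shutil
import tempfile

src = tempfile.mkdtemp()
dst = tempfile.mkdtemp()  # destination already exists

open(os.path.join(src, "driver.py"), "w").close()
shutil.copytree(src, dst, dirs_exist_ok=True)  # merges, no error
print(os.listdir(dst))  # ['driver.py']
```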
5 changes: 2 additions & 3 deletions notebooks/end2end_example/bnn-pynq/tfc_end2end_example.ipynb
@@ -895,8 +895,7 @@
     "metadata": {},
     "outputs": [],
     "source": [
-    "from shutil import copy\n",
-    "from distutils.dir_util import copy_tree\n",
+    "from shutil import copy, copytree\n",
     "\n",
     "# create directory for deployment files\n",
     "deployment_dir = make_build_dir(prefix=\"pynq_deployment_\")\n",
@@ -914,7 +913,7 @@
     "\n",
     "# driver.py and python libraries\n",
     "pynq_driver_dir = model.get_metadata_prop(\"pynq_driver_dir\")\n",
-    "copy_tree(pynq_driver_dir, deployment_dir)"
+    "copytree(pynq_driver_dir, deployment_dir, dirs_exist_ok=True)"
 ]
},
{
2 changes: 1 addition & 1 deletion run-docker.sh
@@ -88,7 +88,7 @@ SCRIPTPATH=$(dirname "$SCRIPT")
 : ${PLATFORM_REPO_PATHS="/opt/xilinx/platforms"}
 : ${XRT_DEB_VERSION="xrt_202220.2.14.354_22.04-amd64-xrt"}
 : ${FINN_HOST_BUILD_DIR="/tmp/$DOCKER_INST_NAME"}
-: ${FINN_DOCKER_TAG="xilinx/finn:$(git describe --always --tags --dirty).$XRT_DEB_VERSION"}
+: ${FINN_DOCKER_TAG="xilinx/finn:$(OLD_PWD=$(pwd); cd $SCRIPTPATH; git describe --always --tags --dirty; cd $OLD_PWD).$XRT_DEB_VERSION"}
 : ${FINN_DOCKER_PREBUILT="0"}
 : ${FINN_DOCKER_RUN_AS_ROOT="0"}
 : ${FINN_DOCKER_GPU="$(docker info | grep nvidia | wc -m)"}
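
The new definition pins `git describe` to the script's own directory, so the tag is derived from the FINN checkout even when run-docker.sh is invoked from elsewhere (the trailing `cd $OLD_PWD` is redundant inside a command substitution's subshell, but harmless). The same idea in Python, for illustration:

```python
# Sketch: compute a Docker tag from the repository containing this
# script, independent of the caller's working directory. Assumes git
# is on PATH and the script lives inside the repo.
import os
import subprocess

script_dir = os.path.dirname(os.path.abspath(__file__))
describe = subprocess.run(
    ["git", "describe", "--always", "--tags", "--dirty"],
    cwd=script_dir,  # equivalent of `cd $SCRIPTPATH` above
    capture_output=True, text=True, check=True,
).stdout.strip()
print(f"xilinx/finn:{describe}")
```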
11 changes: 6 additions & 5 deletions src/finn/builder/build_dataflow_steps.py
@@ -33,7 +33,6 @@
 import shutil
 import warnings
 from copy import deepcopy
-from distutils.dir_util import copy_tree
 from functools import partial
 from qonnx.core.modelwrapper import ModelWrapper
 from qonnx.custom_op.registry import getCustomOp
@@ -656,7 +655,9 @@ def step_create_stitched_ip(model: ModelWrapper, cfg: DataflowBuildConfig):
             )
         )
         # TODO copy all ip sources into output dir? as zip?
-        copy_tree(model.get_metadata_prop("vivado_stitch_proj"), stitched_ip_dir)
+        shutil.copytree(
+            model.get_metadata_prop("vivado_stitch_proj"), stitched_ip_dir, dirs_exist_ok=True
+        )
         print("Vivado stitched IP written into " + stitched_ip_dir)
     if VerificationStepType.STITCHED_IP_RTLSIM in cfg._resolve_verification_steps():
         # prepare ip-stitched rtlsim
@@ -761,7 +762,7 @@ def step_make_pynq_driver(model: ModelWrapper, cfg: DataflowBuildConfig):
     if DataflowOutputType.PYNQ_DRIVER in cfg.generate_outputs:
         driver_dir = cfg.output_dir + "/driver"
         model = model.transform(MakePYNQDriver(cfg._resolve_driver_platform()))
-        copy_tree(model.get_metadata_prop("pynq_driver_dir"), driver_dir)
+        shutil.copytree(model.get_metadata_prop("pynq_driver_dir"), driver_dir, dirs_exist_ok=True)
         print("PYNQ Python driver written into " + driver_dir)
     return model

@@ -862,8 +863,8 @@ def step_deployment_package(model: ModelWrapper, cfg: DataflowBuildConfig):
     bitfile_dir = cfg.output_dir + "/bitfile"
     driver_dir = cfg.output_dir + "/driver"
     os.makedirs(deploy_dir, exist_ok=True)
-    copy_tree(bitfile_dir, deploy_dir + "/bitfile")
-    copy_tree(driver_dir, deploy_dir + "/driver")
+    shutil.copytree(bitfile_dir, deploy_dir + "/bitfile", dirs_exist_ok=True)
+    shutil.copytree(driver_dir, deploy_dir + "/driver", dirs_exist_ok=True)
     return model

5 changes: 0 additions & 5 deletions src/finn/custom_op/fpgadataflow/concat.py
@@ -29,7 +29,6 @@

 import numpy as np
 from qonnx.core.datatype import DataType
-from qonnx.util.basic import roundup_to_integer_multiple

 from finn.custom_op.fpgadataflow.hwcustomop import HWCustomOp

@@ -134,10 +133,6 @@ def execute_node(self, context, graph):
         result = np.concatenate(inp_values, axis=-1)
         context[node.output[0]] = result

-    def get_instream_width_padded(self, ind=0):
-        in_width = self.get_instream_width(ind)
-        return roundup_to_integer_multiple(in_width, 8)
-
     def get_verilog_top_module_intf_names(self):
         intf_names = super().get_verilog_top_module_intf_names()
         n_inputs = self.get_n_inputs()
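
The deleted override padded the Concat input stream width up to the next byte multiple; presumably the HWCustomOp base class already provides this exact default, which is why the override could be dropped. For reference, the padding arithmetic:

```python
# Sketch of the rounding the removed method performed: pad a stream
# width up to the next multiple of 8 bits, since AXI stream widths
# are byte-aligned. Same result as qonnx's roundup_to_integer_multiple.
def roundup_to_multiple(width: int, factor: int = 8) -> int:
    return -(-width // factor) * factor  # ceiling division, then rescale

assert roundup_to_multiple(9) == 16
assert roundup_to_multiple(16) == 16
```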
39 changes: 13 additions & 26 deletions src/finn/custom_op/fpgadataflow/hls/lookup_hls.py
@@ -28,6 +28,7 @@

 import numpy as np
 import os
+import warnings
 from math import ceil, log2
 from qonnx.core.datatype import DataType

@@ -87,31 +88,6 @@ def defines(self, var):
         my_defines.append("#define EmbeddingType %s" % emb_hls_type)
         self.code_gen_dict["$DEFINES$"] = my_defines

-    def read_npy_data(self):
-        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
-        dtype = self.get_input_datatype()
-        if dtype == DataType["BIPOLAR"]:
-            # use binary for bipolar storage
-            dtype = DataType["BINARY"]
-        elem_bits = dtype.bitwidth()
-        packed_bits = self.get_instream_width()
-        packed_hls_type = "ap_uint<%d>" % packed_bits
-        elem_hls_type = dtype.get_hls_datatype_str()
-        npy_type = "int64_t"
-        npy_in = "%s/input_0.npy" % code_gen_dir
-        self.code_gen_dict["$READNPYDATA$"] = []
-        self.code_gen_dict["$READNPYDATA$"].append(
-            'npy2apintstream<%s, %s, %d, %s>("%s", in0_%s);'
-            % (
-                packed_hls_type,
-                elem_hls_type,
-                elem_bits,
-                npy_type,
-                npy_in,
-                self.hls_sname(),
-            )
-        )

     def dataoutstrm(self):
         code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
         dtype = self.get_output_datatype()
@@ -273,7 +249,18 @@ def execute_node(self, context, graph):
             )

         inp = context[node.input[0]]
-        assert inp.dtype == np.int64, "Inputs must be contained in int64 ndarray"
+
+        # Make sure the input has the right container datatype
+        if inp.dtype is not np.float32:
+            # Issue a warning to make the user aware of this type-cast
+            warnings.warn(
+                f"{node.name}: Changing input container datatype from "
+                f"{inp.dtype} to {np.float32}"
+            )
+            # Convert the input to floating point representation as the
+            # container datatype
+            inp = inp.astype(np.float32)
+
         assert inp.shape == exp_ishape, """Input shape doesn't match expected shape."""
         export_idt = self.get_input_datatype()
         odt = self.get_output_datatype()
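
FINN keeps tensor data in float32 "container" arrays regardless of the logical datatype, so the new code normalizes other containers instead of rejecting them. One subtlety when reusing the pattern: `inp.dtype is not np.float32` compares object identity between a numpy dtype instance and the `np.float32` type, so it is true even for float32 arrays; dtype equality is the stricter check, as in this re-sketch:

```python
# Sketch of the container-datatype normalization added above, using
# dtype equality rather than identity so float32 inputs pass through
# without a spurious warning.
import warnings
import numpy as np

inp = np.arange(4, dtype=np.int64)
if inp.dtype != np.float32:  # dtype equality, not `is not`
    warnings.warn(f"Changing input container datatype from {inp.dtype} to float32")
    inp = inp.astype(np.float32)
```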
16 changes: 12 additions & 4 deletions src/finn/custom_op/fpgadataflow/rtl/streamingfifo_rtl.py
@@ -133,10 +133,18 @@ def execute_node(self, context, graph):
         elif mode == "rtlsim":
             code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen")
             # create a npy file for the input of the node
-            assert (
-                str(inp.dtype) == "float32"
-            ), """Input datatype is
-            not float32 as expected."""
+
+            # Make sure the input has the right container datatype
+            if inp.dtype is not np.float32:
+                # Issue a warning to make the user aware of this type-cast
+                warnings.warn(
+                    f"{node.name}: Changing input container datatype from "
+                    f"{inp.dtype} to {np.float32}"
+                )
+                # Convert the input to floating point representation as the
+                # container datatype
+                inp = inp.astype(np.float32)
+
             expected_inp_shape = self.get_folded_input_shape()
             reshaped_input = inp.reshape(expected_inp_shape)
             if DataType[self.get_nodeattr("dataType")] == DataType["BIPOLAR"]:
