Giter Site home page Giter Site logo

native-utils's Introduction

Native Utils and Toolchain Plugin

CI

DSL Documentation for Native Utils

nativeUtils {
  platformConfigs {
    linuxathena {
      // The platform path for archives. Must be set
      platformPath = "linux/athena"

      cppCompiler {
        // Args for debug and release
        args << ""
        // Args for debug
        debugArgs << ""
        // Args for release
        releaseArgs << ""
      }
      // These are identical to cppCompiler
      linker {}
      cCompiler {}
      assembler {}
      objcppCompiler {}
      objcCompiler {}
    }
  }

  // Windows specific functionality to export all symbols from a binary automatically
  exportsConfigs {
    libName {
      x86ExcludeSymbols << ""
      x64ExcludeSymbols << ""
      excludeBuildTypes << ""
      x86SymbolFilter = { symbols ->
        symbols.removeIf({ !it.startsWith('HAL_') && !it.startsWith('HALSIM_') })
      }
      x64SymbolFilter = { symbols ->
        symbols.removeIf({ !it.startsWith('HAL_') && !it.startsWith('HALSIM_') })
      }
    }
  }
  // Multi platform way to expose only a limited number of symbols
  // Used to do private symbols. Can not cross libraries with exportsConfigs
  privateExportsConfigs {
    libName {
      exportsFile = project.file("path/to/symbols/files")
    }

  }
  // Add a dependency
  dependencyConfigs {
    libraryName {
      groupId = ""
      artifactId = ""
      headerClassifier = ""
      sourceClassifier = ""
      ext = ""
      version = ""
      // If the shared dependencies are used at runtime, or just linking
      // Defaults to true
      sharedUsedAtRuntime = true
      sharedPlatforms << ""
      staticPlatforms << ""
    }
  }
  // Add a combined dependency
  combinedDependencyConfigs {
    combinedName {
      // The name to use from use*Library
      libraryName = ""
      // The platforms to apply to
      targetPlatforms << ""
      // The dependencies to combine
      dependencies << ""
    }
  }
}

// Get the platform path for a binary
nativeUtils.getPlatformPath(NativeBinarySpec binary)
// Get the classifier for a dependency
nativeUtils.getDependencyClassifier(NativeBinarySpec, boolean isStaticDependency)
// Get the classifier for a published binary
nativeUtils.getPublishClassifier(NativeLibraryBinarySpec)

// Add libraries that are required to build, add to all binaries for a component
nativeUtils.useRequiredLibrary(ComponentSpec, String... libraries)
// Add libraries that are required to build, add to specific binary
nativeUtils.useRequiredLibrary(BinarySpec, String... libraries)

// Add libraries that are optional, add to all binaries for a component
nativeUtils.useOptionalLibrary(ComponentSpec, String... libraries)
// Add libraries that are optional, add to specific binary
nativeUtils.useOptionalLibrary(BinarySpec, String... libraries)
// The optional ones will be silently skipped

// Add all native utils platforms to a component
nativeUtils.useAllPlatforms(ComponentSpec)

// Update a platform (see platformsConfig block above for documentation)
// This can be used for adding or removing args.
nativeUtils.configurePlatform("platformName") {
}

// Add all arguments for a platform to the binary
nativeUtils.usePlatformArguments(NativeBinarySpec)

// Add all arguments for a platform to all components of a binary
nativeUtils.usePlatformArguments(NativeComponentSpec)

// Add WPI extensions to Native Utils
// See below for DSL of these extensions
nativeUtils.addWpiNativeUtils()

// This adds all the WPILib dependencies, along with combined deps for
// wpilib and driver. They still need to manually be added to individual
// components. These just add to the back end
nativeUtils.wpi.configureDependencies {
  // These are the separate versions used for wpi
  // deps. They should be kept in sync.
  wpiVersion = ""
  niLibVersion = ""
  opencvVersion = ""
  googleTestVersion = ""
}

// The 6 below get the string representation of the main platforms
// For use comparing to binary.targetPlatform.name
nativeUtils.wpi.platforms.roborio
nativeUtils.wpi.platforms.linuxarm32
nativeUtils.wpi.platforms.linuxarm64
nativeUtils.wpi.platforms.windowsx64
nativeUtils.wpi.platforms.osxuniversal
nativeUtils.wpi.platforms.linuxx64

// An immutable list of all wpi platforms
nativeUtils.wpi.platforms.allPlatforms

// A bunch of lists of the default arguments for platforms.
nativeUtils.wpi.defaultArguments.*


// Enable warnings for all platforms. Pass specific platforms in to enable them for just those platforms
nativeUtils.wpi.addWarnings()

// Enable warnings as errors for all platforms. Pass specific platforms in to enable them for just those platforms
// Does not enable warnings, that is still handled by the call above.
nativeUtils.wpi.addWarningsAsErrors()

DSL Documentation for Toolchain Builder

toolchainsPlugin {
  // Register the platforms and build types with the model
  // Default to true
  registerPlatforms = true
  registerReleaseBuildType = true
  registerDebugBuildType = true

  // Add the roborio compiler
  withCrossRoboRIO()
  // Add the raspbian compiler
  withCrossLinuxArm32()
  // The above 2 are included with nativeUtils.addWpiNativeUtils()

  crossCompilers {
    linuxaarch64 {
        architecture = "aarch64"
        compilerPrefix = "arm-frc2019-linux-gnueabi-"
        operatingSystem = "linux"
        optional = false
    }
  }
}

Adding a non standard cross compiler

Use the following to add a custom cross compiler, with the same args as the rio and raspbian

nativeUtils.addWpiNativeUtils() // Must be called before using nativeUtils.wpi.defaultArguments

toolchainsPlugin.crossCompilers {
    linuxaarch64 {
        architecture = "aarch64"
        compilerPrefix = "aarch64-linux-gnu-"
        operatingSystem = "linux"
        optional = false
    }
}

nativeUtils.platformConfigs {
    linuxaarch64 {
        platformPath = "linux/aarch64"
        cppCompiler.args.addAll(nativeUtils.wpi.defaultArguments.linuxCrossCompilerArgs);
        cCompiler.args.addAll(nativeUtils.wpi.defaultArguments.linuxCrossCCompilerArgs);
        linker.args.addAll(nativeUtils.wpi.defaultArguments.linuxCrossLinkerArgs);
        cppCompiler.debugArgs.addAll(nativeUtils.wpi.defaultArguments.linuxCrossDebugCompilerArgs);
        cppCompiler.releaseArgs.addAll(nativeUtils.wpi.defaultArguments.linuxCrossReleaseCompilerArgs);
    }
}

Using custom builds

To use a custom build of native-utils in a robot project, the build must be published, and a GradleRIO build that uses the new version must be published.

  1. Update the version in build.gradle so that native-utils won't overwrite an existing version.
allprojects {
    group = "edu.wpi.first"
    version = "2024.3.1"
  2. Execute .\gradlew publishToMavenLocal
  3. Update native-utils version in GradleRIO build.gradle: api 'edu.wpi.first:native-utils:2024.3.1'
  4. Follow the directions in the GradleRIO readme for publishing a local build and using in a robot program

native-utils's People

Contributors

auscompgeek avatar austinshalit avatar calcmogul avatar jacibrunning avatar jlleitschuh avatar jwhite66 avatar mcm001 avatar peterjohnson avatar prateekma avatar sciencewhiz avatar starlight220 avatar thadhouse avatar

Stargazers

 avatar  avatar  avatar  avatar  avatar

Watchers

 avatar  avatar  avatar  avatar  avatar  avatar  avatar

native-utils's Issues

Enable `/bigobj`

Enabling bigobj in MSVC fixes compilation problems with wpilib state space. Current fix in the wpilib state space vendor dependency is to add this to the build.gradle model block:

  binaries {
    all {
      if (toolChain in VisualCpp) {
        cppCompiler.args '/bigobj'
      }
    }
  }

Increased looping causes slow compilation

While a separation of functionality is nice, having multiple rules where everything gets enumerated multiple times is causing slow configuration times. We could keep most of the usability and speed this library up.

In addition, we could also avoid some closure allocation.

Remove -g flag from release build flags

Adding this flag to 3512's athena release GradleRIO build increases compile times with two threads from 7m 30s to 19m. The state-space libraries are very template-heavy, which generates a lot of debug info.

I suggest that we remove the -g flag from release builds and unconditionally add it to debug builds. Debug info isn't as useful when -O2 optimizations are applied.

SourceLinkGenerationTask fails on Windows when Git remote URL uses SSH

When building allwpilib on Windows, if the repo was cloned using SSH instead of HTTPS, the SourceLinkGenerationTask fails with "No match found". The issue is the regex match in resolveSubmodules, which only supports HTTPS URLs.

I encountered this because I cloned allwpilib through WSL, and used SSH as usual. The SourceLinkGenerationTask only runs on Windows, and I guess most Windows users have always just used HTTPS?

Vendordep Shared Object Dependencies

Is there a way to specify library dependencies in a vendor json file?

Our team has a vendor dependency that depends on the PhoenixV6 vendordep, which strictly uses shared libraries. The problem is that if the end-user has a different version of the PhoenixV6 vendordep added to their project, the build fails with linker errors. Other libraries, such as pathplanner, solve this issue by selecting the static library option for their vendordeps, but there aren't any static versions of the PhoenixV6 vendordep, so this isn't feasible for me.

So, is there any way to specify in our vendordep that we depend on a specific version of another vendordep?

Make C++ standard user-selectable

C++ dashboard applications can use newer compilers and standard versions than C++17. native-utils appends the C++17 flag after the user's flags, so it always overrides the standard version to C++17.

I tried changing the standard version by adding it.cppCompiler.args.add('-std=c++20') and it.cppCompiler.args.remove('-std=c++17') to build.gradle. The former added the C++20 flag, but the latter didn't stop the C++17 flag from being added, since I assume that happens later.

NativePlatforms returns the architecture as 32 bit x86 when running on ARM64

NativePlatforms.java#L13

FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':extractTestJNI'.
> Could not resolve all files for configuration ':nativeDesktopZip'.
   > Could not find opencv-cpp-3.4.7-5-osxx86.zip (edu.wpi.first.thirdparty.frc2021.opencv:opencv-cpp:3.4.7-5).
     Searched in the following locations:
         https://frcmaven.wpi.edu/artifactory/release/edu/wpi/first/thirdparty/frc2021/opencv/opencv-cpp/3.4.7-5/opencv-cpp-3.4.7-5-osxx86.zip

I first noticed this error nearly a year ago when I started using an M1 Macbook Pro. What I noticed was, at the time, the build compiled correctly when running natively. That was because my native Java install was an x86 build of AdoptOpenJDK running in Rosetta 2. It also compiled properly in the WPILib Docker image because Docker automatically emulated it in x86. When I recently updated to using an arm64 build of OpenJDK, that bug that I first noticed in January came back. This time, I traced the cause of the error to edu.wpi.first.toolchain.NativePlatforms. NativePlatforms is referenced in the WPILib Gradle extension which is then called in the standard build.gradle generated by the wpilib project creator when downloading C++ dependencies during the Java build.

Temporary Fix:

  • Comment out the lines in build.gradle where it downloads packages specific to the desktop platform

Possible Solutions:

  • intentionally make the architecture return x86-64 when aarch64/arm64 is detected so that it successfully downloads a file to make gradle happy for now and just warn the user that it is not officially supported
  • make arm64 builds of linux (or maybe only Ubuntu) return the platform as linuxaarch64bionic
  • add arm64 macOS as a build target?

Helpful Info:

% java --version
openjdk 11.0.12 2021-07-20
OpenJDK Runtime Environment Homebrew (build 11.0.12+0)
OpenJDK 64-Bit Server VM Homebrew (build 11.0.12+0, mixed mode)
import org.gradle.internal.os.OperatingSystem

println OperatingSystem.current() // Mac OS X 12.0.1 aarch64
println OperatingSystem.current().isMacOsX() // true
println System.getProperty("os.arch") // aarch64

installRoboRioToolchain tries to download an openSDK release that doesn't exist

Environment:
Debian 12 docker container (aarch64) running on M1 Mac.

Attempting to run ./gradlew installRoboRioToolchain fails when it attempts to download https://github.com/wpilibsuite/opensdk/releases/download/v2023-7/cortexa9_vfpv3-roborio-academic-2023-aarch64-linux-gnu-Toolchain-12.1.0.tgz which does not exist.

Looks like that release archive does indeed not exist, whereas it does for the v2023-6 openSDK release.

Not sure why the release for v2023-7 includes the Debian release name "bullseye" whereas the previous release contains no indication of a distro in the file name.

@ThadHouse for reference from discord discussion.

skipInvalidPlatforms set to false in json fails

> Exception thrown while executing model rule: frcUserProgram(org.gradle.nativeplatform.NativeExecutableSpec) { ... } @ build.gradle line 51, column 9 > all()
   > Could not create an instance of type edu.wpi.first.nativeutils.dependencies.DelegatedDependencySet.
      > Cannot invoke "edu.wpi.first.nativeutils.dependencies.ResolvedNativeDependency.getIncludeRoots()" because "resolved" is null

This happens if a platform is missing or incorrect in the json.

Generate debug info on macOS

A dSYM bundle contains all the debug information for a dylib. It can be generated by running dsymutil libxyz.dylib (the dylib contains a table of the original .o file locations. These object files contain the debug info; dsymutil extracts all of it and packages it into a bundle that we can include with our artifacts).

Gradle 4.3+ incompatibility with ExportsConfigRules

In Gradle 4.3, some of the properties for the compilation and link tasks changed:
https://docs.gradle.org/current/release-notes.html#changes-to-incubating-native-compile-and-link-tasks

This was done to support some improvements we're working on to add proper dependency management, build cache support and simplify the plugin programming model in the native domain.

This was raised as an issue at gradle/gradle#3413.

The type of objDir was File and now it's DirectoryProperty. This was relying on the toString representation to be the absolute path. For DirectoryProperty, the toString is value: D:\allwpilib\hal\build\objs\halSim\shared\halSimCpp.

Here's where objDir is defined:
https://github.com/wpilibsuite/native-utils/blob/master/src/main/groovy/edu/wpi/first/nativeutils/rules/ExportsConfigRules.groovy#L49

The quick and dirty fix is to call it.objectFileDir.get().getAsFile(), this will give you a File as before, but tie your plugins to Gradle 4.3+.

If I understand what you're doing in the plugin, I would suggest a few other things:

  1. It looks like generating the exports file should be a task of its own. Then you can wire compile <- export generator <- link and get some incremental goodness from it. You can find some more information about writing custom Gradle tasks in our Gradle Guides and how the new Provider types fit into things. Gradle 4.4 docs will include more information about this.
  2. I think it doesn't matter where the exports file is written, so you may consider just putting it in your own path (then you don't need to reach into the compile task).
  3. I don't know if you've noticed some of the new native work we're doing now. There are some simple samples you can look at. We would love to hear about any particular use cases you'd like Gradle to support as issues on the gradle-native board.

HTH

C++ dependencies eagerly download

The dependencies always try to download, even when trying to run a clean. Instead, they should only download when the download task is run.

Resource generator C++ source includes redundant semicolon

It generates warnings like this:

> Task :sysid-projects:mechanism:compileEmbeddedBinaryReleaseStaticLibraryEmbeddedBinaryCpp
Warnings in file frcUserProgram.cpp ....
/home/runner/work/sysid/sysid/sysid-projects/mechanism/build/generated/exe/frcUserProgram.cpp:8:2: warning: extra ‘;’ [-Wpedantic]
    8 | };
      |  ^

It's probably this line:
https://github.com/wpilibsuite/native-utils/blob/main/src/main/java/edu/wpi/first/nativeutils/resources/ResourceGenerationAction.java#L54

Third party library import command refers to wrong folder

We are using IntelliJ to program and we ran into an issue with importing libraries. We used the command gradlew vendordep --url=FRCLOCAL/PathplannerLib.json to import the library when the json was installed in the directory :C\users\public\wpilib\**2023**\vendordeps, but the command prompt in intellij gave the error that it couldn't find the directory :C\users\public\wpilib\**frc2023**\vendordeps. The solution I came up with is to make a second folder in the wpilib folder called frc2023 with only the vendordeps in it, but it should be fixable that the command refers straight to the 2023 folder, right?

Recommend Projects

  • React photo React

    A declarative, efficient, and flexible JavaScript library for building user interfaces.

  • Vue.js photo Vue.js

    🖖 Vue.js is a progressive, incrementally-adoptable JavaScript framework for building UI on the web.

  • Typescript photo Typescript

    TypeScript is a superset of JavaScript that compiles to clean JavaScript output.

  • TensorFlow photo TensorFlow

    An Open Source Machine Learning Framework for Everyone

  • Django photo Django

    The Web framework for perfectionists with deadlines.

  • D3 photo D3

    Bring data to life with SVG, Canvas and HTML. 📊📈🎉

Recommend Topics

  • javascript

    JavaScript (JS) is a lightweight interpreted programming language with first-class functions.

  • web

    Some thing interesting about web. New door for the world.

  • server

    A server is a program made to process requests and deliver data to clients.

  • Machine learning

    Machine learning is a way of modeling and interpreting data that allows a piece of software to respond intelligently.

  • Game

    Some thing interesting about game, make everyone happy.

Recommend Org

  • Facebook photo Facebook

    We are working to build community through open source technology. NB: members must have two-factor auth.

  • Microsoft photo Microsoft

    Open source projects and samples from Microsoft.

  • Google photo Google

    Google ❤️ Open Source for everyone.

  • D3 photo D3

    Data-Driven Documents codes.