Skip to content

Commit

Permalink
Add tokenizers module
Browse files Browse the repository at this point in the history
  • Loading branch information
AntonOresten committed Nov 22, 2024
1 parent 900efda commit 00d2b04
Show file tree
Hide file tree
Showing 10 changed files with 19 additions and 87 deletions.
38 changes: 0 additions & 38 deletions .github/workflows/CI.yml
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@ jobs:
matrix:
version:
- '1.11'
- '1.6'
- 'pre'
os:
- ubuntu-latest
Expand All @@ -39,40 +38,3 @@ jobs:
- uses: julia-actions/cache@v2
- uses: julia-actions/julia-buildpkg@v1
- uses: julia-actions/julia-runtest@v1
- uses: julia-actions/julia-processcoverage@v1
- uses: codecov/codecov-action@v4
with:
files: lcov.info
token: ${{ secrets.CODECOV_TOKEN }}
fail_ci_if_error: false
docs:
name: Documentation
runs-on: ubuntu-latest
permissions:
actions: write # needed to allow julia-actions/cache to proactively delete old caches that it has created
contents: write
statuses: write
steps:
- uses: actions/checkout@v4
- uses: julia-actions/setup-julia@v2
with:
version: '1'
- uses: julia-actions/cache@v2
- name: Configure doc environment
shell: julia --project=docs --color=yes {0}
run: |
using Pkg
Pkg.develop(PackageSpec(path=pwd()))
Pkg.instantiate()
- uses: julia-actions/julia-buildpkg@v1
- uses: julia-actions/julia-docdeploy@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DOCUMENTER_KEY: ${{ secrets.DOCUMENTER_KEY }}
- name: Run doctests
shell: julia --project=docs --color=yes {0}
run: |
using Documenter: DocMeta, doctest
using HuggingFaceTokenizers
DocMeta.setdocmeta!(HuggingFaceTokenizers, :DocTestSetup, :(using HuggingFaceTokenizers); recursive=true)
doctest(HuggingFaceTokenizers)
5 changes: 2 additions & 3 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
*.jl.*.cov
*.jl.cov
*.jl.mem
/Manifest.toml
/docs/Manifest.toml
/docs/build/
Manifest.toml
.CondaPkg
2 changes: 2 additions & 0 deletions CondaPkg.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
[deps]
tokenizers = ""
6 changes: 5 additions & 1 deletion Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,12 @@ uuid = "a6888d44-1185-43bb-bd0f-7806f9976d18"
authors = ["AntonOresten <anton.oresten42@gmail.com> and contributors"]
version = "1.0.0-DEV"

[deps]
PythonCall = "6099a3de-0909-46bc-b1f4-468b9a2dfc0d"

[compat]
julia = "1.6.7"
PythonCall = "0.9.23"
julia = "1"

[extras]
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
Expand Down
3 changes: 0 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,3 @@
# HuggingFaceTokenizers

[![Stable](https://img.shields.io/badge/docs-stable-blue.svg)](https://AntonOresten.github.io/HuggingFaceTokenizers.jl/stable/)
[![Dev](https://img.shields.io/badge/docs-dev-blue.svg)](https://AntonOresten.github.io/HuggingFaceTokenizers.jl/dev/)
[![Build Status](https://github.com/AntonOresten/HuggingFaceTokenizers.jl/actions/workflows/CI.yml/badge.svg?branch=main)](https://github.com/AntonOresten/HuggingFaceTokenizers.jl/actions/workflows/CI.yml?query=branch%3Amain)
[![Coverage](https://codecov.io/gh/AntonOresten/HuggingFaceTokenizers.jl/branch/main/graph/badge.svg)](https://codecov.io/gh/AntonOresten/HuggingFaceTokenizers.jl)
3 changes: 0 additions & 3 deletions docs/Project.toml

This file was deleted.

23 changes: 0 additions & 23 deletions docs/make.jl

This file was deleted.

14 changes: 0 additions & 14 deletions docs/src/index.md

This file was deleted.

8 changes: 7 additions & 1 deletion src/HuggingFaceTokenizers.jl
Original file line number Diff line number Diff line change
@@ -1,5 +1,11 @@
module HuggingFaceTokenizers

using PythonCall

# Handle to the Python `tokenizers` package. Kept in a `Ref{Py}` and filled
# in by `__init__` so the Python import runs at module load time (runtime)
# rather than during Julia precompilation, where Python state cannot be cached.
const tokenizers = Ref{Py}()

# Module initializer: populate the `tokenizers` handle via PythonCall.
__init__() = (tokenizers[] = pyimport("tokenizers"))

end
4 changes: 3 additions & 1 deletion test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -2,5 +2,7 @@ using HuggingFaceTokenizers
using Test

@testset "HuggingFaceTokenizers.jl" begin
    # No tests yet: the package wraps the Python `tokenizers` library via
    # PythonCall, so meaningful tests need that runtime dependency available.
    # TODO: add tests as the Julia API surface is implemented.



end

0 comments on commit 00d2b04

Please sign in to comment.