add github action scripts

- moves python script to .github directory
- also cleans up .gitignore

parent ec038c6059
commit d3884a4e62
.github/scripts/merge_csv_to_topojson.py
@@ -3,8 +3,8 @@ import csv
 import json
 
 # files to merge
-csv_file = 'website/data/library_services_information.csv'
-topojson_file = 'website/data/boundaries.topo.json'
+csv_file = '../../website/data/library_services_information.csv'
+topojson_file = '../../website/data/boundaries.topo.json'
 
 geo = open(topojson_file, 'r') # open the topo.json file
 json_data = json.loads(geo.read()) # read the file and load into a dict
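The visible lines only cover loading the two inputs, so here is a minimal sketch of what a merge script along these lines might do, assuming it joins CSV rows onto the matching TopoJSON geometry properties. The join key `code`, the CSV column layout, and the TopoJSON object structure below are assumptions for illustration, not taken from the repository.

```python
import csv
import json

# paths relative to .github/scripts/, as in the diff above
csv_file = '../../website/data/library_services_information.csv'
topojson_file = '../../website/data/boundaries.topo.json'

with open(topojson_file, 'r') as geo:
    json_data = json.loads(geo.read())  # TopoJSON document as a dict

with open(csv_file, newline='') as f:
    # index CSV rows by a shared identifier ('code' is a hypothetical column name)
    rows = {row['code']: row for row in csv.DictReader(f)}

# copy CSV columns onto each geometry's properties (object layout is assumed)
for obj in json_data['objects'].values():
    for geom in obj.get('geometries', []):
        props = geom.setdefault('properties', {})
        extra = rows.get(props.get('code'))
        if extra:
            props.update(extra)

with open(topojson_file, 'w') as out:
    json.dump(json_data, out)
```

Note that with the script now living in .github/scripts/, the '../../website/...' paths only resolve correctly when the script is run with that directory as the working directory.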
.github/workflows/merge-csv-to-topojson.yml (new file, 24 lines, vendored)
@@ -0,0 +1,24 @@
+name: Merge CSV to TopoJSON
+on:
+  push:
+    branches:
+      - master
+    paths:
+      - website/data/library_services_information.csv
+jobs:
+  mergeData:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
+      - name: Merge CSV to TopoJSON
+        run: ./github/scripts/merge_csv_to_topojson.py
+        shell: python3
+      - name: Create Pull Request
+        uses: peter-evans/create-pull-request@v3
+        with:
+          commit-message: update TopoJSON
+          title: Update TopoJSON
+          body: Update boundaries.topo.json with new data from library_services_information.csv
+          branch: auto-update-topo
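To try the workflow's merge step locally before pushing, something along these lines should work, assuming the script lands at .github/scripts/merge_csv_to_topojson.py as the commit message indicates; the path handling below is illustrative, not part of the repository.

```python
import subprocess
from pathlib import Path

# assumes this snippet is saved at the repository root; adjust as needed
repo_root = Path(__file__).resolve().parent
script_dir = repo_root / '.github' / 'scripts'

# run the script with .github/scripts/ as the working directory so the
# relative '../../website/...' paths introduced above resolve to website/data/
subprocess.run(['python3', 'merge_csv_to_topojson.py'], cwd=script_dir, check=True)
```

Relative paths resolve against the process's working directory rather than the script's own location, which is why the choice of `cwd` matters here.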
.gitignore (2 lines changed, vendored)
@@ -1,4 +1,2 @@
-*.xlsx
-website/data/all_library_services.geojson
 website/data/boundaries.geojson
 website/secrets.js