append new rows to existing file + commit link
fixes #68 #57
paulgirard committed Oct 16, 2019
1 parent 1af8672 commit c75db14
Showing 3 changed files with 49 additions and 13 deletions.
2 changes: 1 addition & 1 deletion importApp/src/config/default.json
@@ -1,5 +1,5 @@
{
-   "repoRawContent": "https://raw.githubusercontent.com/medialab/ricardo_data/",
+   "repoRawContent": "https://raw.githubusercontent.com/medialab/ricardo_data",
"apiUri": "https://api.github.com/repos/medialab/ricardo_data/contents",
"branchUri": "https://api.github.com/repos/medialab/ricardo_data/branches",
"referenceUri": "https://api.github.com/repos/medialab/ricardo_data/git/refs",
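The trailing slash is dropped from repoRawContent because repoData.js inserts its own separator when it builds raw-content URLs. A minimal sketch of the resulting URL, using the same template as repoData.js with a hypothetical branch and file name:

```js
// Sketch only: the branch and file name below are hypothetical examples.
const repoRawContent = 'https://raw.githubusercontent.com/medialab/ricardo_data';
const branch = 'import-2019-10';               // hypothetical working branch
const fileName = 'flows/SomeSource_1850.csv';  // hypothetical flow file
const url = `${repoRawContent}/${branch}/data/${fileName}`;
// -> https://raw.githubusercontent.com/medialab/ricardo_data/import-2019-10/data/flows/SomeSource_1850.csv
```
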
12 changes: 6 additions & 6 deletions importApp/src/containers/DataPublish/DataPublish.js
@@ -25,6 +25,7 @@ import {downloadFlow, downloadTable} from '../../utils/fileExporter';
import GithubAuthModal from '../../components/GithubAuthModal';

import {SOURCE_SLUG_FILENAME} from '../../constants';
+ import {owner, repoName} from '../../config/default';


class DataPublish extends React.Component {
@@ -45,7 +46,7 @@ class DataPublish extends React.Component {

render () {
const {flows, repoData, referenceTables, originalLength} = this.props;
- const {selectedBranch, remoteUpdateStatus} = repoData;
+ const {selectedBranch, remoteUpdateStatus, lastCommit, remoteUpdateMessage} = repoData;
const repoTables = repoData.tables;

let updatedTables = [];
@@ -68,16 +69,15 @@ class DataPublish extends React.Component {
// we need to source metadata to generate source filename
const sources = keyBy(referenceTables.sources, s => s.slug);
const parsedFlows = csvParse(flows.data.map(d => d.join(',')).join('\n'));
- console.log(sources);
const groupedFlows = groupBy(parsedFlows, (item) => SOURCE_SLUG_FILENAME(sources[item['source']]));
- console.log(groupedFlows);


const handleUpdateRemoteFiles= (auth) => {
this.handleCloseModal();

const flowFiles = Object.keys(groupedFlows).map((file) => {
return {
- fileName: `${file}.csv`,
+ fileName: `flows/${file}.csv`,
data: groupedFlows[file]
}
});
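
Flow rows are grouped by source slug, and each group now becomes one file under data/flows/ rather than directly under data/. A minimal sketch of the shape produced above, with hypothetical slugs:

```js
// Hypothetical groups keyed by SOURCE_SLUG_FILENAME(source):
const groupedFlows = {
  SomeSource_1850: [{ source: 'somesource1850', year: '1850' /* ... */ }],
  OtherSource_1860: [{ source: 'othersource1860', year: '1860' /* ... */ }]
};
// Same mapping as above: one upload entry per source file, now under flows/
const flowFiles = Object.keys(groupedFlows).map((file) => ({
  fileName: `flows/${file}.csv`,
  data: groupedFlows[file]
}));
// -> [{ fileName: 'flows/SomeSource_1850.csv', data: [...] },
//     { fileName: 'flows/OtherSource_1860.csv', data: [...] }]
```
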
@@ -142,8 +142,8 @@ class DataPublish extends React.Component {
</Control>
</Field>
<Field>
- {remoteUpdateStatus === 'loading' && <Help isColor='success'>updating files on github, please wait...</Help>}
- {remoteUpdateStatus === 'updated' && <Help isColor='success'>files are updated on github</Help>}
+ {remoteUpdateStatus === 'loading' && <Help isColor='success'>updating files on github: {remoteUpdateMessage}...</Help>}
+ {remoteUpdateStatus === 'updated' && <Help isColor='success'>files have been commited on github see <a href={`https://github.com/${owner}/${repoName}/commit/${lastCommit}`}>commit details</a></Help>}
{remoteUpdateStatus === 'fail' && <Help isColor='danger'>fail to update files on github</Help>}
</Field>
<GithubAuthModal
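On success, the Help message now links straight to the commit on GitHub. The link is built from the owner and repoName values exported by config/default.json and the SHA stored in repoData.lastCommit; a small sketch with an illustrative SHA:

```js
// owner and repoName come from config/default.json; the SHA is whatever
// UPDATE_REMOTE_FILES_SUCCESS stored in repoData.lastCommit.
const lastCommit = 'c75db14';
const commitUrl = `https://github.com/${owner}/${repoName}/commit/${lastCommit}`;
```
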
48 changes: 42 additions & 6 deletions importApp/src/redux/modules/repoData.js
@@ -6,11 +6,11 @@ import Octokat from 'octokat';

import { Base64 } from 'js-base64';
import {
- csvFormat
+ csvFormat,
+ csvParse
} from 'd3-dsv';

import {INIT_TABLES} from './referenceTables';
- import { SET_STEP } from './ui';

export const FETCH_TABLE_REQUEST = 'FETCH_TABLE_REQUEST';
export const FETCH_TABLE_SUCCESS = 'FETCH_TABLE_SUCCESS';
@@ -39,6 +39,7 @@ export const LOGIN_CREATE_BRANCH_SUCCESS = 'LOGIN_CREATE_BRANCH_SUCCESS';
export const LOGIN_CREATE_BRANCH_FAILURE = 'LOGIN_CREATE_BRANCH_FAILURE';

export const UPDATE_REMOTE_FILES_REQUEST = 'UPDATE_REMOTE_FILES_REQUEST';
+ export const UPDATE_REMOTE_FILES_LOG = 'UPDATE_REMOTE_FILES_LOG';
export const UPDATE_REMOTE_FILES_SUCCESS = 'UPDATE_REMOTE_FILES_SUCCESS';
export const UPDATE_REMOTE_FILES_FAILURE = 'UPDATE_REMOTE_FILES_FAILURE';

@@ -147,8 +148,7 @@ export const updateRemoteFiles = (payload) => (dispatch) => {
const {files, branch, auth} = payload;

const github = new Octokat({
- username: auth.username,
- password: auth.token
+ token: auth.token
});

dispatch(async () => {
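
Authentication now relies on a personal access token alone rather than a username/password pair. A minimal sketch of the client setup, assuming the repository handle is obtained from the configured owner and repoName as elsewhere in the app:

```js
// Token-only Octokat client; owner and repoName are assumed to come from
// config/default.json, as in the rest of the importApp.
const github = new Octokat({ token: auth.token });
const repo = github.repos(owner, repoName);
```
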
@@ -157,6 +157,25 @@
let baseReference = await repo.git.refs(`heads/${branch}`).fetch();
let treeItems = [];
for (let file of files) {

+ if (!file.sha && file.fileName.includes('flows')){
+ //new file flow ?
+ //check if file already exists
+ dispatch({
+ type: UPDATE_REMOTE_FILES_LOG,
+ payload: `downloading existing flows file ${file.fileName}`
+ });
+ const exists = await get(`${repoRawContent}/${branch}/data/${file.fileName}`,{ responseType: 'text', responseEncoding: 'utf8'})
+ if (exists.status === 200) {
+ // append new rows at end of the existing file
+ file.data = csvParse(exists.data).concat(file.data)
+ }
+ // else it's a new file nothing to do
+ }
+ dispatch({
+ type: UPDATE_REMOTE_FILES_LOG,
+ payload: `uploading ${file.fileName}`
+ });
let fileGit = await repo.git.blobs.create({content: Base64.encode(csvFormat(file.data)), encoding: 'base64'});
let filePath = `data/${file.fileName}`;
treeItems.push({
@@ -166,11 +185,18 @@
type: "blob"
})
}

+ dispatch({
+ type: UPDATE_REMOTE_FILES_LOG,
+ payload: `creating tree`
+ });
let tree = await repo.git.trees.create({
tree: treeItems,
base_tree: baseReference.object.sha
});
+ dispatch({
+ type: UPDATE_REMOTE_FILES_LOG,
+ payload: `creating commit`
+ });
let commit = await repo.git.commits.create({
message: auth.message || DEFAULT_MESSAGE,
tree: tree.sha,
@@ -180,6 +206,7 @@
baseReference.update({sha: commit.sha});
dispatch({
type: UPDATE_REMOTE_FILES_SUCCESS,
+ payload: commit.sha
});
} catch(err) {
console.error(err);
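
Putting the pieces together, updateRemoteFiles now works in three phases: merge new flow rows into whatever already exists on the branch, collect one blob per file into a single tree, then create one commit and fast-forward the branch reference. A condensed sketch of that flow (progress dispatches and error handling omitted; the tree item mode and the commit parents follow the usual GitHub git data API shape and are assumptions where the diff is truncated):

```js
// Assumes repo, get, csvParse, csvFormat, Base64, repoRawContent and
// DEFAULT_MESSAGE are in scope as in repoData.js.
async function pushFiles(repo, branch, files, auth) {
  const baseReference = await repo.git.refs(`heads/${branch}`).fetch();
  const treeItems = [];
  for (const file of files) {
    // 1. Merge: a flows file with no known sha may already exist on the branch;
    //    if it does, the new rows are appended after the existing ones.
    if (!file.sha && file.fileName.includes('flows')) {
      const existing = await get(`${repoRawContent}/${branch}/data/${file.fileName}`,
        { responseType: 'text', responseEncoding: 'utf8' });
      if (existing.status === 200) {
        file.data = csvParse(existing.data).concat(file.data);
      }
    }
    // 2. One blob per file, collected into a single tree.
    const blob = await repo.git.blobs.create({
      content: Base64.encode(csvFormat(file.data)),
      encoding: 'base64'
    });
    treeItems.push({ path: `data/${file.fileName}`, sha: blob.sha, mode: '100644', type: 'blob' }); // mode assumed
  }
  // 3. One tree, one commit, then fast-forward the branch reference.
  const tree = await repo.git.trees.create({ tree: treeItems, base_tree: baseReference.object.sha });
  const commit = await repo.git.commits.create({
    message: auth.message || DEFAULT_MESSAGE,
    tree: tree.sha,
    parents: [baseReference.object.sha] // assumed single parent: the previous branch head
  });
  await baseReference.update({ sha: commit.sha }); // this SHA ends up in repoData.lastCommit
  return commit.sha;
}
```
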
@@ -333,12 +360,21 @@ export default function reducer(state = initialState, action){
case UPDATE_REMOTE_FILES_REQUEST:
return {
...state,
- remoteUpdateStatus: 'loading'
+ remoteUpdateStatus: 'loading',
+ lastCommit: null,
+ remoteUpdateMessage: null
}
+ case UPDATE_REMOTE_FILES_LOG:
+ return {
+ ...state,
+ remoteUpdateStatus: 'loading',
+ remoteUpdateMessage: payload
+ }
case UPDATE_REMOTE_FILES_SUCCESS:
return {
...state,
remoteUpdateStatus: "updated",
+ lastCommit: payload
}
case UPDATE_REMOTE_FILES_FAILURE:
return {
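The reducer now threads both the step-by-step progress and the resulting commit SHA through the repoData slice, which is what DataPublish renders. A small sketch of the state transitions during a publish, assuming the reducer and action types are imported from this module (the file name and SHA are illustrative):

```js
// A publish walks the repoData slice through these states:
let state = reducer(undefined, { type: UPDATE_REMOTE_FILES_REQUEST });
// -> remoteUpdateStatus 'loading', lastCommit and remoteUpdateMessage reset to null
state = reducer(state, { type: UPDATE_REMOTE_FILES_LOG, payload: 'uploading flows/SomeSource_1850.csv' });
// -> remoteUpdateMessage feeds the "updating files on github: ..." Help message
state = reducer(state, { type: UPDATE_REMOTE_FILES_SUCCESS, payload: 'c75db14' });
// -> remoteUpdateStatus 'updated'; lastCommit feeds the "commit details" link
```
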
