Merge pull request #8 from beatriceo/front-end

changed pages/home to pages/call
Beatrice Olivera 2018-08-28 13:49:11 +01:00 committed by GitHub
commit e73df2ed65
8 changed files with 199 additions and 5 deletions

View File

@@ -194,3 +194,153 @@ const exchange = data => {
};
const logError = error => console.warn("Whoops! Error:", error);
// Google Cloud Speech Playground with node.js and socket.io
// Created by Vinzenz Aubry for sansho 24.01.17
// Feel free to improve!
// Contact: vinzenz@sansho.studio
const express = require('express'); // const bodyParser = require('body-parser'); // const path = require('path');
const fs = require('fs');
const environmentVars = require('dotenv').config();
// Google Cloud
const speech = require('@google-cloud/speech');
const speechClient = new speech.SpeechClient(); // Creates a client
const Translate = require('@google-cloud/translate');
const projectId = 'booming-banner-212315';
const translate = new Translate({
projectId: projectId,
});
const target = 'en';
const app = express();
const port = process.env.PORT || 1337;
const server = require('http').createServer(app);
const io = require('socket.io')(server);
app.use('/assets', express.static(__dirname + '/public'));
app.use('/session/assets', express.static(__dirname + '/public'));
app.set('view engine', 'ejs');
// =========================== ROUTERS ================================ //
app.get('/', function (req, res) {
res.render('index', {});
});
app.use('/', function (req, res, next) {
next(); // console.log(`Request Url: ${req.url}`);
});
// =========================== SOCKET.IO ================================ //
io.on('connection', function (client) {
console.log('Client Connected to server');
let recognizeStream = null;
client.on('join', function (data) {
client.emit('messages', 'Socket Connected to Server');
});
client.on('messages', function (data) {
client.emit('broad', data);
});
client.on('startGoogleCloudStream', function (data) {
startRecognitionStream(this, data);
});
client.on('endGoogleCloudStream', function (data) {
stopRecognitionStream();
});
client.on('binaryData', function (data) {
// console.log(data); //log binary data
if (recognizeStream !== null) {
recognizeStream.write(data);
}
});
function startRecognitionStream(client, data) {
recognizeStream = speechClient.streamingRecognize(request)
.on('error', console.error)
.on('data', (data) => {
process.stdout.write(
(data.results[0] && data.results[0].alternatives[0])
? `Transcription: ${data.results[0].alternatives[0].transcript}\n`
: `\n\nReached transcription time limit, press Ctrl+C\n`);
client.emit('speechData', data);
if (data.results[0] && data.results[0].alternatives[0] !== undefined) {
let text = data.results[0].alternatives[0].transcript
translate
.translate(text, target)
.then(results => {
const translation = results[0];
client.emit('translateData', translation)
console.log(`Text: ${text}`);
console.log(`Translation: ${translation}`);
})
.catch(err => {
console.error('ERROR:', err);
});
}
// if end of utterance, let's restart stream
// this is a small hack. After 65 seconds of silence, the stream will still throw an error for speech length limit
if (data.results[0] && data.results[0].isFinal) {
stopRecognitionStream();
startRecognitionStream(client);
// console.log('restarted stream serverside');
}
});
}
function stopRecognitionStream() {
if (recognizeStream) {
recognizeStream.end();
}
recognizeStream = null;
}
});
// =========================== GOOGLE CLOUD SETTINGS ================================ //
// The encoding of the audio file, e.g. 'LINEAR16'
// The sample rate of the audio file in hertz, e.g. 16000
// The BCP-47 language code to use, e.g. 'en-US'
const encoding = 'LINEAR16';
const sampleRateHertz = 16000;
const languageCode = 'fr-FR'; //en-US
const request = {
config: {
encoding: encoding,
sampleRateHertz: sampleRateHertz,
languageCode: languageCode,
profanityFilter: false,
enableWordTimeOffsets: true
},
interimResults: true // If you want interim results, set this to true
};
// =========================== START SERVER ================================ //
server.listen(port, "127.0.0.1", function () { //http listen, to make socket work
// app.address = "127.0.0.1";
console.log('Server started on port: ' + port);
});
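For reference, a minimal browser-side sketch of how a client could drive this server (not part of this commit). The event names startGoogleCloudStream, binaryData, speechData, translateData and endGoogleCloudStream come from the server code above; the socket URL, the sendAudioChunk helper, and the audio-capture details are assumptions for illustration only.

// Hypothetical client-side sketch; assumes the socket.io client script is loaded.
const socket = io('http://localhost:1337');
socket.on('connect', function () {
  socket.emit('join', 'client joined');
  socket.emit('startGoogleCloudStream', {}); // ask the server to open a recognize stream
});
// Transcriptions pushed back by the server
socket.on('speechData', function (data) {
  const result = data.results[0];
  if (result && result.alternatives[0]) {
    console.log('Transcript:', result.alternatives[0].transcript);
  }
});
// Translations pushed back by the server
socket.on('translateData', function (translation) {
  console.log('Translation:', translation);
});
// Audio capture (omitted here) is assumed to produce LINEAR16 chunks at 16 kHz,
// matching the `request` config above; each chunk is forwarded as binary data.
function sendAudioChunk(chunk) {
  socket.emit('binaryData', chunk);
}
// On hang-up:
// socket.emit('endGoogleCloudStream', {});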

View File

@@ -12,5 +12,5 @@ $light-gray: #F4F4F4;
$primary: #F55E4F;
$secondary: #5ED17E;
$background: #33333D;
$card-backround: #464650;
$card-background: #464650;
$navbar-background: #1F1F29;

View File

@@ -0,0 +1,17 @@
.card {
display: flex;
justify-content: space-between;
background-color: $card-background;
.profile {
}
.call {
transform: scaleX(-1);
-moz-transform: scaleX(-1);
-webkit-transform: scaleX(-1);
-ms-transform: scaleX(-1);
}
}

View File

@@ -1,6 +1,6 @@
class PagesController < ApplicationController
skip_before_action :authenticate_user!, only: [:home]
skip_before_action :authenticate_user!, only: [:call]
def home
def call
end
end

View File

@@ -0,0 +1,12 @@
<div class="card">
<div class="profile img-circle">
<img src="https://kitt.lewagon.com/placeholder/users/ssaunier" alt="">
</div>
<div class="info">
<h3>FirstName LastName</h3>
<p>Last call: <%= Date.today %></p>
</div>
<div class="call">
<i class="fas fa-phone"></i>
</div>
</div>

View File

@@ -0,0 +1,15 @@
<div class="container">
<div class="contacts">
<h2>Contacts</h2>
<% 5.times do %>
<%= render "layouts/contact" %>
<% end %>
</div>
<div class="video-feed">
<video src=""></video>
</div>
</div>

View File

@@ -1,8 +1,8 @@
Rails.application.routes.draw do
get 'video_sessions/create'
devise_for :users
root to: 'pages#home'
root to: 'pages#call'
get '/contacts', to: 'users#index'
post '/sessions', to: 'video_sessions#create'
mount ActionCable.server, at: '/cable'