Zima Weather: Localization Setup

The OpenWeatherMap API behind Zima Weather supports many languages out of the box. I wanted Zima to support all of these languages throughout the UI as well, but that would mean translating each new phrase into a set of 30+ languages, a daunting task if done manually. Luckily, Google provides a Cloud Translate API with a free tier up to 500,000 characters, which covered all my needs.

Frontend setup

On the frontend, I decided to go with the i18next library for translations. i18next is a well-known internationalization framework for JavaScript with a comprehensive ecosystem, plugins, and extensibility. For this setup I use two plugins: i18next-http-backend, which loads translations directly from JSON files on the server, and i18next-browser-languagedetector, which detects the language from the query string or the browser settings.

Here is how I set up the library in Zima:

// Assuming a bundler / ES-module setup; both plugins also ship browser builds
import i18next from 'i18next';
import HttpApi from 'i18next-http-backend';
import LanguageDetector from 'i18next-browser-languagedetector';

i18next
    .use(HttpApi)
    .use(LanguageDetector) // needed for the `detection` options below to take effect
    .init({
        detection: {
            // check the `?lang=` query parameter first, then the browser language
            order: ['querystring', 'navigator'],
            lookupQuerystring: 'lang'
        },
        backend: {
            // where the translation files live on the server
            loadPath: '/locales/{{lng}}/{{ns}}.json'
        },
        load: 'languageOnly', // request "en" instead of "en-US"
        fallbackLng: 'en'
    })
    .then(function (t) {
        // Find all elements with a `data-i18n` attribute
        const elements = document.querySelectorAll('[data-i18n]');

        // Loop over them and set their text content to the translated string
        elements.forEach(function (element) {
            const key = element.getAttribute('data-i18n');
            element.textContent = i18next.t(key);
        });
    });

In this setup, the language is detected first from the query string (which I use to test any language) and then from the browser settings, and translations are loaded from the corresponding path on the backend.

I use the languageOnly option to avoid HTTP 404 errors, since i18next by default tries to load the language together with the country code (e.g., "en-US" rather than just "en"), and those region-specific files don't exist in this setup.
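
To make it concrete, here is roughly what gets requested for a browser reporting de-DE with the loadPath above (illustrative, not an exhaustive list of requests):

// Browser language: de-DE
// load: 'languageOnly'  ->  GET /locales/de/translation.json
// default load ('all')  ->  GET /locales/de-DE/translation.json (404) and /locales/de/translation.json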

A translation is then applied to each HTML element that has a data-i18n attribute defining a key from the JSON file, e.g. <span data-i18n="rate_modal.okay">Okay</span>
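
Since i18next uses the dot as its default key separator, a key like rate_modal.okay maps onto a nested object in the translation file. For illustration, the English file would contain:

{
    "rate_modal": {
        "okay": "Okay"
    }
}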

The translation directory structure should look like the following:

πŸ“‚ locales
β”œβ”€β”€ πŸ“‚ en
β”‚   └── πŸ“œ translation.json
β”œβ”€β”€ πŸ“‚ de
β”‚   └── πŸ“œ translation.json
β”œβ”€β”€ πŸ“‚ fr
β”‚   └── πŸ“œ translation.json
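
These files also have to be reachable at the loadPath configured above. How you serve them depends on your stack; here is a minimal sketch assuming an Express server and the src/locales directory used later in this post (both are assumptions, any static file server will do):

// Minimal sketch, not the actual Zima backend: expose src/locales at /locales
// so i18next can fetch /locales/{{lng}}/{{ns}}.json
const express = require('express');
const path = require('path');

const app = express();
app.use('/locales', express.static(path.join(__dirname, 'src', 'locales')));
app.listen(3000);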

Create directory structure

First we write a simple Node.js script to generate empty translation.json files in all language directories that we need:

const fs = require('fs');
const path = require('path');

const languages = ['en', 'fr', 'de', 'es', 'it', 'nl', 'pt', 'ru', 'ja', 'zh']; // Add or remove languages as needed

languages.forEach((lang) => {
    const dirPath = path.join(__dirname, lang);

    if (!fs.existsSync(dirPath)) {
        fs.mkdirSync(dirPath, { recursive: true });
        fs.writeFileSync(path.join(dirPath, 'translation.json'), '{}', 'utf-8');
    } else {
        console.log(`Directory ${lang} already exists!`);
    }
});

console.log('Directories and translation files have been created!');
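
The script takes no arguments; assuming it is saved as create_locales.js inside the locales directory (the file name is my own choice), it is run once with:

node create_locales.js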

Add translation script

And here is the Node.js script that adds a translation to all of the language files we created above:

const fs = require('fs');
const path = require('path');
const https = require('https');
const _ = require('lodash');

function translateText(text, targetLanguage, api_key) {
    return new Promise((resolve, reject) => {
        const url = `https://translation.googleapis.com/language/translate/v2?key=${api_key}&q=${encodeURIComponent(
            text
        )}&target=${targetLanguage}`;

        https.get(url, (res) => {
            let data = '';

            res.on('data', (chunk) => {
                data += chunk;
            });

            res.on('end', () => {
                const result = JSON.parse(data);
                if (result.error) {
                    reject(result.error);
                } else {
                    resolve(result.data.translations[0].translatedText);
                }
            });
        }).on('error', (err) => {
            reject(err.message);
        });
    });
}

async function updateTranslationFiles(rootDir, translationId, translationText, api_key) {
    const languageDirs = fs.readdirSync(rootDir);

    for (const languageDir of languageDirs) {
        const langPath = path.join(rootDir, languageDir);
        if (fs.lstatSync(langPath).isDirectory()) {
            const localizationFile = path.join(langPath, 'translation.json');

            const localizationData = JSON.parse(fs.readFileSync(localizationFile, 'utf8'));

            if (languageDir !== 'en') {
                _.set(localizationData, translationId, await translateText(translationText, languageDir, api_key));
            } else {
                _.set(localizationData, translationId, translationText);
            }

            fs.writeFileSync(localizationFile, JSON.stringify(localizationData, null, 2));
        }
    }
}


(async () => {
    const rootDir = './src/locales'; // Change this to the correct relative path
    const translationId = process.argv[2];
    const translationText = process.argv[3];
    const api_key = process.argv[4];

    if (!translationId || !translationText || !api_key) {
        console.error('Please provide the translation ID, text, and API key as arguments');
        process.exit(1);
    }

    await updateTranslationFiles(rootDir, translationId, translationText, api_key);
})();

The script uses the Google Cloud Translate API to get translations for all languages other than English, and overwrites any existing translation for the given JSON key. It is called as follows:

node add_translation.js "rate_modal.okay" "Okay" API_KEY

  • The first argument is the JSON key you want to add the translation under,
  • the second is the phrase to translate,
  • the third is the Google API key.

I still use ChatGPT to refine the Google-generated translations from time to time, but this remains a very robust setup that you can use to add translations to multiple languages at once.