Error converting string to int vectors

advertisements

There are two errors in the convert function saying that conversion to char doesn't exist. I am attempting to convert the string vectors into int vectors, but this char business keeps ruining everything :(. I tried casting (char) in front of the error but to no avail. Any help/suggestions appreciated.

Also, according to my lab rules, I cannot change anything inside my asciiToInt function. And the system("pause") is because I am using microsoft visual studio.

#include <iostream>
#include <vector>
#include <string>

using namespace std;

void input(vector<string> &largeString1, vector<string> &largeString2);
void convert(vector<string> largeString1, vector<string> largeString2, vector<int> &largeInt1, vector<int> &largeInt2);
int asciiToInt(char ch);
void add(vector<int> largeInt1, vector<int> largeInt2, vector<int> &finalInt);
void output(const vector<int> finalInt);

int main()
{
    vector<string> largeString1;
    vector<string> largeString2;
    vector<int> largeInt1(12, 0);
    vector<int> largeInt2(12, 0);
    vector<int> finalInt(13, 0);

    for (int i = 0; i < 4; i++)
    {
        input(largeString1, largeString2);
        convert(largeString1, largeString2, largeInt1, largeInt2);
        add(largeInt1, largeInt2, finalInt);
        output(finalInt);
    }

    system("pause");

    return 0;
}
void input(vector<string> &largeString1, vector<string> &largeString2)
{
    cout << "Input:" << endl << endl;
    for (int i = 0; i < 12; i++)
        cin >> largeString1[i];
    for (int j = 0; j < 12; j++)
        cin >> largeString2[j];
}
void convert(vector<string> largeString1, vector<string> largeString2, vector<int> &largeInt1, vector<int> &largeInt2)
{
    int size1 = size(largeString1);
    int size2 = size(largeString2);
    for (int i = 0; i < 12; i++)
    {
        int dynamicsize1 = size1 - i;
        largeInt1[11 - i] = asciiToInt(largeString1[dynamicsize1]);
    }
    for (int j = 0; j < 12; j++)
    {
        int dynamicsize2 = size2 - j;
        largeInt2[11 - j] = asciiToInt(largeString2[dynamicsize2]);
    }
}
int asciiToInt(char ch)
{
    return (ch - '0');
}
void add(vector<int> largeInt1, vector<int> largeInt2, vector<int> &finalInt)
{
    for (int i = 0; i < 13; i++)
    {
        finalInt[12 - i] = largeInt1[11 - i] + largeInt2[11 - i];
    }
}
void output(const vector<int> finalInt)
{
    cout << endl << "Output:" << endl << endl << "The sum is: ";
    for (int i = 0; i < 13; i++)
    {
        cout << finalInt[i];
    }
}


I think your goal is to read two large numbers which represented by two strings and then calculate the sum of them.

First problem: Here you don't want a vector<string> for largeStringX, what you need is a string (or a vector<char>). Because string is a container that contains characters (i.e. char), while vector<string> is a container that contains some strings.

Second problem: To acquire the size (length) of the string, instead of size(someString), you should use someString.size().