#include <cmath>
#include <iostream>
using namespace std;
#define CHANGE 0.5
#define YMAX (yourterminallength/(2*(1/CHANGE)))
#define YMIN (-yourterminallength/(2*(1/CHANGE)))
#define XMAX (yourterminalwidth/(2*(1/CHANGE)))
#define XMIN (-yourterminalwidth/(2*(1/CHANGE)))
int main()
{
float m,b,y,x,c;
cout << "Intervals:" << CHANGE << " \tYMAX: " << YMAX << "\tYMIN: " << YMIN << "\tXMAX: " << XMAX << "\tXMIN: " << XMIN;
cout << "\nEnter any key to continue...";
cout << "nEnter a linear equation in slope-intercept form: y = Mx + B";
cout << "\nM=";
cin >> m;
cout << "B=";
cin >> b;
c=YMAX;
while(c>YMIN)
{
x=XMIN;
while(x<XMAX)
{
if(c==0)
{
if(x==0)
cout << char(197);
else
cout << char(196);
}
else if(x==0)
cout << char(179);
else if((m*x+b)==c)
cout << char(254);
else
cout << char(176);
x+=CHANGE;
}
c-=CHANGE;
}
return 0;
}
The program is meant to graph simple linear equations. It worked fine on my Windows computer, but when I copied it over to run on Ubuntu, the output comes out garbled. I know I am not giving much information, but I have no idea what is wrong with it.